Files
gocryptotrader/database/models/postgres/datahistoryjob.go
Scott 48434dfd46 Feature: Candle conversion & Candle validation (#716)
* Remove old concept. Introduce new job types and candle scaling

* Adds extra processing, commands

* new concept for queued jobs. Jobs can pause. New commands to manage status

* End-of-day commit: designing tables and implementing prerequisites further.

* Adds postgres data history relations

* Fixes table design for sqlite. Fixes all issues from merge

* Fixes craziness of database design. Adds some functions to get related jobs

* Fixes errors

* Updates some documentation, manages prerequisite jobs a little better, adds rpc funcs

* Fixes database design and adjust repo functions

* Tests database relationship

* Test coverage of new job functions

* Finishes coverage of new functions

* Commands and RPC coverage

* New database modifications for new job types

* Adds db support of new columns. Adds conversion validation. lint

* command blurb changes

* Allows websocket test to pass consistently

* Fixes merge issue preventing datahistorymanager from starting via config

* Minor fixes for different job type processing

* Fixes rangeholder issue, fixes validation, does not address jobs not starting or wrong status

* Fixes database tests, but at what cost. Fixes dhm tests

* Fixes dhj completion issue. Adds prerequisite by nickname

* Fixes validation processing. Adds db tests and validation

* Fixes validation job processing range

* Fixes trade sql. Reduces defaults. Validation processing and errors

* Updates cli job commands. adds validation decimal. fix job validation

* Expands run job handling and tests

* Validation work

* Fixes validation processing

* candle relations. new job type. updating database design

* Adds secondary exchange support. Sets stage for candle override

* Re-adds accidentally deleted relationship

* Updates loading and saving candles to have relationship data when relevant

* Now validates and replaces candle data appropriately

* Fixes getting and setting datahistory data. Neatens DHM

* Test coverage

* Updates proto for new db types. New test coverage. Secondary exchange work

* Investigation into never-ending validation jobs. Now that intervals are ruled out, now need to complete the job....

* Fixes issues with validation job completion. Fixes validation volume issue for secondary exchange

* Adds candle warning support to the backtester

* Fixes warnings

* lint and begin docs

* Documentation updates. Final testing changes

* Minor fixes

* docs, prerequisite checks, more testing

* Fixes binance trade test. Rename err

* Documentation fixes. Figure fixes

* documentation update

* Fixes remote PSQL tests

* Fix binance mock test

* Remove unnecessary JSON

* regen proto

* Some minor nit fixes

* Var usage, query sorting, log improving, sql mirroring

* Extra coverage

* Experimental removal of m.jobs and mutex. Fix messaging

* Fixes error

* Lint fixes, command description improvements. More isRunning gates

* description improvements

* Lint

* BUFF regenerate

* Rough concept to fix insertions taking up long periods of time

* New calculation for trade data. Adds batch saving

This also adds an experimental request feature to shut down lingering requests. However, it's uncertain whether this is having any impact. Initially it was thought that the trades were taking the time, not SQL. Will investigate further.

* Removes experimental requester. Adds documentation. Fixes typo

* rm unused error

* re-adds more forgotten contributors

* Now with proper commit count
2021-08-05 10:27:27 +10:00

2612 lines
80 KiB
Go

// Code generated by SQLBoiler 3.5.0-gct (https://github.com/thrasher-corp/sqlboiler). DO NOT EDIT.
// This file is meant to be re-generated in place and/or deleted at any time.
package postgres
import (
"context"
"database/sql"
"fmt"
"reflect"
"strconv"
"strings"
"sync"
"time"
"github.com/pkg/errors"
"github.com/thrasher-corp/sqlboiler/boil"
"github.com/thrasher-corp/sqlboiler/queries"
"github.com/thrasher-corp/sqlboiler/queries/qm"
"github.com/thrasher-corp/sqlboiler/queries/qmhelper"
"github.com/thrasher-corp/sqlboiler/strmangle"
"github.com/volatiletech/null"
)
// Datahistoryjob is an object representing the database table.
// Field order and struct tags are significant: sqlboiler builds its
// column mapping from them via reflection, so do not reorder or edit tags.
type Datahistoryjob struct {
	// ID is the primary key; it is the only column with a database default.
	ID string `boil:"id" json:"id" toml:"id" yaml:"id"`
	// Nickname is the job's human-readable identifier.
	Nickname string `boil:"nickname" json:"nickname" toml:"nickname" yaml:"nickname"`
	// ExchangeNameID is a foreign key into the exchange table
	// (resolved via the ExchangeName relationship below).
	ExchangeNameID string `boil:"exchange_name_id" json:"exchange_name_id" toml:"exchange_name_id" yaml:"exchange_name_id"`
	// Asset, Base and Quote identify the market the job operates on.
	Asset string `boil:"asset" json:"asset" toml:"asset" yaml:"asset"`
	Base  string `boil:"base" json:"base" toml:"base" yaml:"base"`
	Quote string `boil:"quote" json:"quote" toml:"quote" yaml:"quote"`
	// StartTime and EndTime bound the data range the job covers.
	StartTime time.Time `boil:"start_time" json:"start_time" toml:"start_time" yaml:"start_time"`
	EndTime   time.Time `boil:"end_time" json:"end_time" toml:"end_time" yaml:"end_time"`
	// DataType, Interval and Status are stored as float64 by the generator;
	// NOTE(review): their enum meanings appear to live in the application
	// layer — confirm against the data history manager.
	DataType    float64   `boil:"data_type" json:"data_type" toml:"data_type" yaml:"data_type"`
	Interval    float64   `boil:"interval" json:"interval" toml:"interval" yaml:"interval"`
	RequestSize float64   `boil:"request_size" json:"request_size" toml:"request_size" yaml:"request_size"`
	MaxRetries  float64   `boil:"max_retries" json:"max_retries" toml:"max_retries" yaml:"max_retries"`
	BatchCount  float64   `boil:"batch_count" json:"batch_count" toml:"batch_count" yaml:"batch_count"`
	Status      float64   `boil:"status" json:"status" toml:"status" yaml:"status"`
	Created     time.Time `boil:"created" json:"created" toml:"created" yaml:"created"`
	// The remaining nullable columns support the conversion/validation job
	// types added with this table revision.
	ConversionInterval     null.Float64 `boil:"conversion_interval" json:"conversion_interval,omitempty" toml:"conversion_interval" yaml:"conversion_interval,omitempty"`
	OverwriteData          null.Bool    `boil:"overwrite_data" json:"overwrite_data,omitempty" toml:"overwrite_data" yaml:"overwrite_data,omitempty"`
	DecimalPlaceComparison null.Int     `boil:"decimal_place_comparison" json:"decimal_place_comparison,omitempty" toml:"decimal_place_comparison" yaml:"decimal_place_comparison,omitempty"`
	// SecondaryExchangeID is a nullable foreign key into the exchange table
	// (resolved via the SecondaryExchange relationship below).
	SecondaryExchangeID      null.String  `boil:"secondary_exchange_id" json:"secondary_exchange_id,omitempty" toml:"secondary_exchange_id" yaml:"secondary_exchange_id,omitempty"`
	IssueTolerancePercentage null.Float64 `boil:"issue_tolerance_percentage" json:"issue_tolerance_percentage,omitempty" toml:"issue_tolerance_percentage" yaml:"issue_tolerance_percentage,omitempty"`
	ReplaceOnIssue           null.Bool    `boil:"replace_on_issue" json:"replace_on_issue,omitempty" toml:"replace_on_issue" yaml:"replace_on_issue,omitempty"`

	// R caches eagerly-loaded relationships; L exposes the Load* methods.
	// Both are excluded from serialization and column binding ("-" tags).
	R *datahistoryjobR `boil:"-" json:"-" toml:"-" yaml:"-"`
	L datahistoryjobL  `boil:"-" json:"-" toml:"-" yaml:"-"`
}
// DatahistoryjobColumns maps each struct field to its database column
// name, for use when building column lists (whitelists/blacklists).
var DatahistoryjobColumns = struct {
	ID                       string
	Nickname                 string
	ExchangeNameID           string
	Asset                    string
	Base                     string
	Quote                    string
	StartTime                string
	EndTime                  string
	DataType                 string
	Interval                 string
	RequestSize              string
	MaxRetries               string
	BatchCount               string
	Status                   string
	Created                  string
	ConversionInterval       string
	OverwriteData            string
	DecimalPlaceComparison   string
	SecondaryExchangeID      string
	IssueTolerancePercentage string
	ReplaceOnIssue           string
}{
	ID:                       "id",
	Nickname:                 "nickname",
	ExchangeNameID:           "exchange_name_id",
	Asset:                    "asset",
	Base:                     "base",
	Quote:                    "quote",
	StartTime:                "start_time",
	EndTime:                  "end_time",
	DataType:                 "data_type",
	Interval:                 "interval",
	RequestSize:              "request_size",
	MaxRetries:               "max_retries",
	BatchCount:               "batch_count",
	Status:                   "status",
	Created:                  "created",
	ConversionInterval:       "conversion_interval",
	OverwriteData:            "overwrite_data",
	DecimalPlaceComparison:   "decimal_place_comparison",
	SecondaryExchangeID:      "secondary_exchange_id",
	IssueTolerancePercentage: "issue_tolerance_percentage",
	ReplaceOnIssue:           "replace_on_issue",
}
// Generated where

// whereHelpernull_Float64 builds WHERE-clause query mods for a nullable
// float64 column identified by field.
type whereHelpernull_Float64 struct{ field string }

// EQ matches rows whose column equals x, with NULL handled correctly.
func (h whereHelpernull_Float64) EQ(x null.Float64) qm.QueryMod {
	return qmhelper.WhereNullEQ(h.field, false, x)
}

// NEQ matches rows whose column does not equal x, with NULL handled correctly.
func (h whereHelpernull_Float64) NEQ(x null.Float64) qm.QueryMod {
	return qmhelper.WhereNullEQ(h.field, true, x)
}

// IsNull matches rows whose column is NULL.
func (h whereHelpernull_Float64) IsNull() qm.QueryMod { return qmhelper.WhereIsNull(h.field) }

// IsNotNull matches rows whose column is not NULL.
func (h whereHelpernull_Float64) IsNotNull() qm.QueryMod { return qmhelper.WhereIsNotNull(h.field) }

// LT matches rows whose column is strictly less than x.
func (h whereHelpernull_Float64) LT(x null.Float64) qm.QueryMod {
	return qmhelper.Where(h.field, qmhelper.LT, x)
}

// LTE matches rows whose column is less than or equal to x.
func (h whereHelpernull_Float64) LTE(x null.Float64) qm.QueryMod {
	return qmhelper.Where(h.field, qmhelper.LTE, x)
}

// GT matches rows whose column is strictly greater than x.
func (h whereHelpernull_Float64) GT(x null.Float64) qm.QueryMod {
	return qmhelper.Where(h.field, qmhelper.GT, x)
}

// GTE matches rows whose column is greater than or equal to x.
func (h whereHelpernull_Float64) GTE(x null.Float64) qm.QueryMod {
	return qmhelper.Where(h.field, qmhelper.GTE, x)
}
// whereHelpernull_Bool builds WHERE-clause query mods for a nullable
// boolean column identified by field.
type whereHelpernull_Bool struct{ field string }

// EQ matches rows whose column equals x, with NULL handled correctly.
func (h whereHelpernull_Bool) EQ(x null.Bool) qm.QueryMod {
	return qmhelper.WhereNullEQ(h.field, false, x)
}

// NEQ matches rows whose column does not equal x, with NULL handled correctly.
func (h whereHelpernull_Bool) NEQ(x null.Bool) qm.QueryMod {
	return qmhelper.WhereNullEQ(h.field, true, x)
}

// IsNull matches rows whose column is NULL.
func (h whereHelpernull_Bool) IsNull() qm.QueryMod { return qmhelper.WhereIsNull(h.field) }

// IsNotNull matches rows whose column is not NULL.
func (h whereHelpernull_Bool) IsNotNull() qm.QueryMod { return qmhelper.WhereIsNotNull(h.field) }

// LT matches rows whose column is strictly less than x.
func (h whereHelpernull_Bool) LT(x null.Bool) qm.QueryMod {
	return qmhelper.Where(h.field, qmhelper.LT, x)
}

// LTE matches rows whose column is less than or equal to x.
func (h whereHelpernull_Bool) LTE(x null.Bool) qm.QueryMod {
	return qmhelper.Where(h.field, qmhelper.LTE, x)
}

// GT matches rows whose column is strictly greater than x.
func (h whereHelpernull_Bool) GT(x null.Bool) qm.QueryMod {
	return qmhelper.Where(h.field, qmhelper.GT, x)
}

// GTE matches rows whose column is greater than or equal to x.
func (h whereHelpernull_Bool) GTE(x null.Bool) qm.QueryMod {
	return qmhelper.Where(h.field, qmhelper.GTE, x)
}
// whereHelpernull_Int builds WHERE-clause query mods for a nullable
// integer column identified by field.
type whereHelpernull_Int struct{ field string }

// EQ matches rows whose column equals x, with NULL handled correctly.
func (h whereHelpernull_Int) EQ(x null.Int) qm.QueryMod {
	return qmhelper.WhereNullEQ(h.field, false, x)
}

// NEQ matches rows whose column does not equal x, with NULL handled correctly.
func (h whereHelpernull_Int) NEQ(x null.Int) qm.QueryMod {
	return qmhelper.WhereNullEQ(h.field, true, x)
}

// IsNull matches rows whose column is NULL.
func (h whereHelpernull_Int) IsNull() qm.QueryMod { return qmhelper.WhereIsNull(h.field) }

// IsNotNull matches rows whose column is not NULL.
func (h whereHelpernull_Int) IsNotNull() qm.QueryMod { return qmhelper.WhereIsNotNull(h.field) }

// LT matches rows whose column is strictly less than x.
func (h whereHelpernull_Int) LT(x null.Int) qm.QueryMod {
	return qmhelper.Where(h.field, qmhelper.LT, x)
}

// LTE matches rows whose column is less than or equal to x.
func (h whereHelpernull_Int) LTE(x null.Int) qm.QueryMod {
	return qmhelper.Where(h.field, qmhelper.LTE, x)
}

// GT matches rows whose column is strictly greater than x.
func (h whereHelpernull_Int) GT(x null.Int) qm.QueryMod {
	return qmhelper.Where(h.field, qmhelper.GT, x)
}

// GTE matches rows whose column is greater than or equal to x.
func (h whereHelpernull_Int) GTE(x null.Int) qm.QueryMod {
	return qmhelper.Where(h.field, qmhelper.GTE, x)
}
// DatahistoryjobWhere exposes typed WHERE-clause builders for every
// column, pre-bound to the fully-qualified, quoted column name.
var DatahistoryjobWhere = struct {
	ID                       whereHelperstring
	Nickname                 whereHelperstring
	ExchangeNameID           whereHelperstring
	Asset                    whereHelperstring
	Base                     whereHelperstring
	Quote                    whereHelperstring
	StartTime                whereHelpertime_Time
	EndTime                  whereHelpertime_Time
	DataType                 whereHelperfloat64
	Interval                 whereHelperfloat64
	RequestSize              whereHelperfloat64
	MaxRetries               whereHelperfloat64
	BatchCount               whereHelperfloat64
	Status                   whereHelperfloat64
	Created                  whereHelpertime_Time
	ConversionInterval       whereHelpernull_Float64
	OverwriteData            whereHelpernull_Bool
	DecimalPlaceComparison   whereHelpernull_Int
	SecondaryExchangeID      whereHelpernull_String
	IssueTolerancePercentage whereHelpernull_Float64
	ReplaceOnIssue           whereHelpernull_Bool
}{
	ID:                       whereHelperstring{field: "\"datahistoryjob\".\"id\""},
	Nickname:                 whereHelperstring{field: "\"datahistoryjob\".\"nickname\""},
	ExchangeNameID:           whereHelperstring{field: "\"datahistoryjob\".\"exchange_name_id\""},
	Asset:                    whereHelperstring{field: "\"datahistoryjob\".\"asset\""},
	Base:                     whereHelperstring{field: "\"datahistoryjob\".\"base\""},
	Quote:                    whereHelperstring{field: "\"datahistoryjob\".\"quote\""},
	StartTime:                whereHelpertime_Time{field: "\"datahistoryjob\".\"start_time\""},
	EndTime:                  whereHelpertime_Time{field: "\"datahistoryjob\".\"end_time\""},
	DataType:                 whereHelperfloat64{field: "\"datahistoryjob\".\"data_type\""},
	Interval:                 whereHelperfloat64{field: "\"datahistoryjob\".\"interval\""},
	RequestSize:              whereHelperfloat64{field: "\"datahistoryjob\".\"request_size\""},
	MaxRetries:               whereHelperfloat64{field: "\"datahistoryjob\".\"max_retries\""},
	BatchCount:               whereHelperfloat64{field: "\"datahistoryjob\".\"batch_count\""},
	Status:                   whereHelperfloat64{field: "\"datahistoryjob\".\"status\""},
	Created:                  whereHelpertime_Time{field: "\"datahistoryjob\".\"created\""},
	ConversionInterval:       whereHelpernull_Float64{field: "\"datahistoryjob\".\"conversion_interval\""},
	OverwriteData:            whereHelpernull_Bool{field: "\"datahistoryjob\".\"overwrite_data\""},
	DecimalPlaceComparison:   whereHelpernull_Int{field: "\"datahistoryjob\".\"decimal_place_comparison\""},
	SecondaryExchangeID:      whereHelpernull_String{field: "\"datahistoryjob\".\"secondary_exchange_id\""},
	IssueTolerancePercentage: whereHelpernull_Float64{field: "\"datahistoryjob\".\"issue_tolerance_percentage\""},
	ReplaceOnIssue:           whereHelpernull_Bool{field: "\"datahistoryjob\".\"replace_on_issue\""},
}
// DatahistoryjobRels is where relationship names are stored.
// The values are the keys accepted by qm.Load and the field names on
// datahistoryjobR.
var DatahistoryjobRels = struct {
	ExchangeName                   string
	SecondaryExchange              string
	SourceJobCandles               string
	ValidationJobCandles           string
	PrerequisiteJobDatahistoryjobs string
	JobDatahistoryjobs             string
	JobDatahistoryjobresults       string
}{
	ExchangeName:                   "ExchangeName",
	SecondaryExchange:              "SecondaryExchange",
	SourceJobCandles:               "SourceJobCandles",
	ValidationJobCandles:           "ValidationJobCandles",
	PrerequisiteJobDatahistoryjobs: "PrerequisiteJobDatahistoryjobs",
	JobDatahistoryjobs:             "JobDatahistoryjobs",
	JobDatahistoryjobresults:       "JobDatahistoryjobresults",
}
// datahistoryjobR is where relationships are stored.
// It caches the results of the eager-loading Load* methods below:
// two to-one exchange relationships, two candle to-many relationships,
// the self-referencing prerequisite/dependent job relationships (via the
// datahistoryjobrelations join table), and the job's results.
type datahistoryjobR struct {
	ExchangeName                   *Exchange
	SecondaryExchange              *Exchange
	SourceJobCandles               CandleSlice
	ValidationJobCandles           CandleSlice
	PrerequisiteJobDatahistoryjobs DatahistoryjobSlice
	JobDatahistoryjobs             DatahistoryjobSlice
	JobDatahistoryjobresults       DatahistoryjobresultSlice
}
// NewStruct creates a new, empty relationship struct.
func (*datahistoryjobR) NewStruct() *datahistoryjobR {
	return new(datahistoryjobR)
}
// datahistoryjobL is where Load methods for each relationship are stored.
type datahistoryjobL struct{}

var (
	// Column lists used when building INSERT/UPDATE/UPSERT statements.
	// Only "id" carries a database default.
	datahistoryjobAllColumns            = []string{"id", "nickname", "exchange_name_id", "asset", "base", "quote", "start_time", "end_time", "data_type", "interval", "request_size", "max_retries", "batch_count", "status", "created", "conversion_interval", "overwrite_data", "decimal_place_comparison", "secondary_exchange_id", "issue_tolerance_percentage", "replace_on_issue"}
	datahistoryjobColumnsWithoutDefault = []string{"nickname", "exchange_name_id", "asset", "base", "quote", "start_time", "end_time", "data_type", "interval", "request_size", "max_retries", "batch_count", "status", "created", "conversion_interval", "overwrite_data", "decimal_place_comparison", "secondary_exchange_id", "issue_tolerance_percentage", "replace_on_issue"}
	datahistoryjobColumnsWithDefault    = []string{"id"}
	datahistoryjobPrimaryKeyColumns     = []string{"id"}
)

type (
	// DatahistoryjobSlice is an alias for a slice of pointers to Datahistoryjob.
	// This should generally be used opposed to []Datahistoryjob.
	DatahistoryjobSlice []*Datahistoryjob
	// DatahistoryjobHook is the signature for custom Datahistoryjob hook methods
	DatahistoryjobHook func(context.Context, boil.ContextExecutor, *Datahistoryjob) error

	// datahistoryjobQuery wraps a query builder so model-specific finishers
	// (One, All, Count, Exists) can be attached.
	datahistoryjobQuery struct {
		*queries.Query
	}
)

// Cache for insert, update and upsert
var (
	datahistoryjobType = reflect.TypeOf(&Datahistoryjob{})
	// datahistoryjobMapping maps struct fields to columns for value binding.
	datahistoryjobMapping      = queries.MakeStructMapping(datahistoryjobType)
	datahistoryjobPrimaryKeyMapping, _ = queries.BindMapping(datahistoryjobType, datahistoryjobMapping, datahistoryjobPrimaryKeyColumns)
	// Per-operation statement caches, each guarded by its own RWMutex so
	// generated SQL is built once per distinct column set.
	datahistoryjobInsertCacheMut sync.RWMutex
	datahistoryjobInsertCache    = make(map[string]insertCache)
	datahistoryjobUpdateCacheMut sync.RWMutex
	datahistoryjobUpdateCache    = make(map[string]updateCache)
	datahistoryjobUpsertCacheMut sync.RWMutex
	datahistoryjobUpsertCache    = make(map[string]insertCache)
)

var (
	// Force time package dependency for automated UpdatedAt/CreatedAt.
	_ = time.Second
	// Force qmhelper dependency for where clause generation (which doesn't
	// always happen)
	_ = qmhelper.Where
)

// Registered hook slices, one per hook point; populated via
// AddDatahistoryjobHook and executed by the do*Hooks methods below.
var datahistoryjobBeforeInsertHooks []DatahistoryjobHook
var datahistoryjobBeforeUpdateHooks []DatahistoryjobHook
var datahistoryjobBeforeDeleteHooks []DatahistoryjobHook
var datahistoryjobBeforeUpsertHooks []DatahistoryjobHook
var datahistoryjobAfterInsertHooks []DatahistoryjobHook
var datahistoryjobAfterSelectHooks []DatahistoryjobHook
var datahistoryjobAfterUpdateHooks []DatahistoryjobHook
var datahistoryjobAfterDeleteHooks []DatahistoryjobHook
var datahistoryjobAfterUpsertHooks []DatahistoryjobHook
// doBeforeInsertHooks executes all "before insert" hooks.
//
// Every do*Hooks method below shares the same shape: if hooks are
// globally disabled on the context (boil.HooksAreSkipped) it returns nil
// immediately; otherwise the registered hooks run in registration order
// and the first error aborts the chain.
func (o *Datahistoryjob) doBeforeInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}
	for _, hook := range datahistoryjobBeforeInsertHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}
	return nil
}

// doBeforeUpdateHooks executes all "before Update" hooks.
func (o *Datahistoryjob) doBeforeUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}
	for _, hook := range datahistoryjobBeforeUpdateHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}
	return nil
}

// doBeforeDeleteHooks executes all "before Delete" hooks.
func (o *Datahistoryjob) doBeforeDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}
	for _, hook := range datahistoryjobBeforeDeleteHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}
	return nil
}

// doBeforeUpsertHooks executes all "before Upsert" hooks.
func (o *Datahistoryjob) doBeforeUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}
	for _, hook := range datahistoryjobBeforeUpsertHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}
	return nil
}

// doAfterInsertHooks executes all "after Insert" hooks.
func (o *Datahistoryjob) doAfterInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}
	for _, hook := range datahistoryjobAfterInsertHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}
	return nil
}

// doAfterSelectHooks executes all "after Select" hooks.
func (o *Datahistoryjob) doAfterSelectHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}
	for _, hook := range datahistoryjobAfterSelectHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}
	return nil
}

// doAfterUpdateHooks executes all "after Update" hooks.
func (o *Datahistoryjob) doAfterUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}
	for _, hook := range datahistoryjobAfterUpdateHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}
	return nil
}

// doAfterDeleteHooks executes all "after Delete" hooks.
func (o *Datahistoryjob) doAfterDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}
	for _, hook := range datahistoryjobAfterDeleteHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}
	return nil
}

// doAfterUpsertHooks executes all "after Upsert" hooks.
func (o *Datahistoryjob) doAfterUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}
	for _, hook := range datahistoryjobAfterUpsertHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}
	return nil
}
// AddDatahistoryjobHook registers your hook function for all future operations.
// Hooks registered for an unrecognized hook point are silently ignored,
// matching the original switch's implicit default.
func AddDatahistoryjobHook(hookPoint boil.HookPoint, datahistoryjobHook DatahistoryjobHook) {
	// Resolve the slice that backs the requested hook point, then append.
	var target *[]DatahistoryjobHook
	switch hookPoint {
	case boil.BeforeInsertHook:
		target = &datahistoryjobBeforeInsertHooks
	case boil.BeforeUpdateHook:
		target = &datahistoryjobBeforeUpdateHooks
	case boil.BeforeDeleteHook:
		target = &datahistoryjobBeforeDeleteHooks
	case boil.BeforeUpsertHook:
		target = &datahistoryjobBeforeUpsertHooks
	case boil.AfterInsertHook:
		target = &datahistoryjobAfterInsertHooks
	case boil.AfterSelectHook:
		target = &datahistoryjobAfterSelectHooks
	case boil.AfterUpdateHook:
		target = &datahistoryjobAfterUpdateHooks
	case boil.AfterDeleteHook:
		target = &datahistoryjobAfterDeleteHooks
	case boil.AfterUpsertHook:
		target = &datahistoryjobAfterUpsertHooks
	}
	if target != nil {
		*target = append(*target, datahistoryjobHook)
	}
}
// One returns a single datahistoryjob record from the query.
// sql.ErrNoRows is returned unwrapped so callers can compare against it.
func (q datahistoryjobQuery) One(ctx context.Context, exec boil.ContextExecutor) (*Datahistoryjob, error) {
	queries.SetLimit(q.Query, 1)

	job := &Datahistoryjob{}
	if err := q.Bind(ctx, exec, job); err != nil {
		if errors.Cause(err) == sql.ErrNoRows {
			return nil, sql.ErrNoRows
		}
		return nil, errors.Wrap(err, "postgres: failed to execute a one query for datahistoryjob")
	}

	if err := job.doAfterSelectHooks(ctx, exec); err != nil {
		return job, err
	}

	return job, nil
}
// All returns all Datahistoryjob records from the query.
func (q datahistoryjobQuery) All(ctx context.Context, exec boil.ContextExecutor) (DatahistoryjobSlice, error) {
	var jobs []*Datahistoryjob
	if err := q.Bind(ctx, exec, &jobs); err != nil {
		return nil, errors.Wrap(err, "postgres: failed to assign all query results to Datahistoryjob slice")
	}

	// Only walk the result set when after-select hooks are registered.
	if len(datahistoryjobAfterSelectHooks) != 0 {
		for _, job := range jobs {
			if err := job.doAfterSelectHooks(ctx, exec); err != nil {
				return jobs, err
			}
		}
	}

	return jobs, nil
}
// Count returns the count of all Datahistoryjob records in the query.
func (q datahistoryjobQuery) Count(ctx context.Context, exec boil.ContextExecutor) (int64, error) {
	// Replace any SELECT list with COUNT(*) before executing.
	queries.SetSelect(q.Query, nil)
	queries.SetCount(q.Query)

	var total int64
	if err := q.Query.QueryRowContext(ctx, exec).Scan(&total); err != nil {
		return 0, errors.Wrap(err, "postgres: failed to count datahistoryjob rows")
	}

	return total, nil
}
// Exists checks if the row exists in the table.
func (q datahistoryjobQuery) Exists(ctx context.Context, exec boil.ContextExecutor) (bool, error) {
	// COUNT with LIMIT 1 — the database can stop at the first match.
	queries.SetSelect(q.Query, nil)
	queries.SetCount(q.Query)
	queries.SetLimit(q.Query, 1)

	var matches int64
	if err := q.Query.QueryRowContext(ctx, exec).Scan(&matches); err != nil {
		return false, errors.Wrap(err, "postgres: failed to check if datahistoryjob exists")
	}

	return matches > 0, nil
}
// ExchangeName pointed to by the foreign key.
func (o *Datahistoryjob) ExchangeName(mods ...qm.QueryMod) exchangeQuery {
	// Filter on the primary key first, then apply caller-supplied mods.
	queryMods := append([]qm.QueryMod{
		qm.Where(`"id" = ?`, o.ExchangeNameID),
	}, mods...)

	query := Exchanges(queryMods...)
	queries.SetFrom(query.Query, `"exchange"`)

	return query
}
// SecondaryExchange pointed to by the foreign key.
func (o *Datahistoryjob) SecondaryExchange(mods ...qm.QueryMod) exchangeQuery {
	// Filter on the primary key first, then apply caller-supplied mods.
	queryMods := append([]qm.QueryMod{
		qm.Where(`"id" = ?`, o.SecondaryExchangeID),
	}, mods...)

	query := Exchanges(queryMods...)
	queries.SetFrom(query.Query, `"exchange"`)

	return query
}
// SourceJobCandles retrieves all the candle's Candles with an executor via source_job_id column.
func (o *Datahistoryjob) SourceJobCandles(mods ...qm.QueryMod) candleQuery {
	queryMods := make([]qm.QueryMod, 0, len(mods)+1)
	queryMods = append(queryMods, mods...)
	queryMods = append(queryMods, qm.Where(`"candle"."source_job_id"=?`, o.ID))

	query := Candles(queryMods...)
	queries.SetFrom(query.Query, `"candle"`)

	// Default to selecting every candle column unless the caller narrowed it.
	if len(queries.GetSelect(query.Query)) == 0 {
		queries.SetSelect(query.Query, []string{`"candle".*`})
	}

	return query
}
// ValidationJobCandles retrieves all the candle's Candles with an executor via validation_job_id column.
func (o *Datahistoryjob) ValidationJobCandles(mods ...qm.QueryMod) candleQuery {
	queryMods := make([]qm.QueryMod, 0, len(mods)+1)
	queryMods = append(queryMods, mods...)
	queryMods = append(queryMods, qm.Where(`"candle"."validation_job_id"=?`, o.ID))

	query := Candles(queryMods...)
	queries.SetFrom(query.Query, `"candle"`)

	// Default to selecting every candle column unless the caller narrowed it.
	if len(queries.GetSelect(query.Query)) == 0 {
		queries.SetSelect(query.Query, []string{`"candle".*`})
	}

	return query
}
// PrerequisiteJobDatahistoryjobs retrieves all the datahistoryjob's Datahistoryjobs with an executor via id column.
// It joins through the datahistoryjobrelations table to find the jobs this
// job lists as prerequisites.
func (o *Datahistoryjob) PrerequisiteJobDatahistoryjobs(mods ...qm.QueryMod) datahistoryjobQuery {
	queryMods := make([]qm.QueryMod, 0, len(mods)+2)
	queryMods = append(queryMods, mods...)
	queryMods = append(queryMods,
		qm.InnerJoin(`"datahistoryjobrelations" on "datahistoryjob"."id" = "datahistoryjobrelations"."prerequisite_job_id"`),
		qm.Where(`"datahistoryjobrelations"."job_id"=?`, o.ID),
	)

	query := Datahistoryjobs(queryMods...)
	queries.SetFrom(query.Query, `"datahistoryjob"`)

	// Default to selecting every job column unless the caller narrowed it.
	if len(queries.GetSelect(query.Query)) == 0 {
		queries.SetSelect(query.Query, []string{`"datahistoryjob".*`})
	}

	return query
}
// JobDatahistoryjobs retrieves all the datahistoryjob's Datahistoryjobs with an executor via id column.
// It joins through the datahistoryjobrelations table to find the jobs that
// list this job as a prerequisite (the inverse of PrerequisiteJobDatahistoryjobs).
func (o *Datahistoryjob) JobDatahistoryjobs(mods ...qm.QueryMod) datahistoryjobQuery {
	queryMods := make([]qm.QueryMod, 0, len(mods)+2)
	queryMods = append(queryMods, mods...)
	queryMods = append(queryMods,
		qm.InnerJoin(`"datahistoryjobrelations" on "datahistoryjob"."id" = "datahistoryjobrelations"."job_id"`),
		qm.Where(`"datahistoryjobrelations"."prerequisite_job_id"=?`, o.ID),
	)

	query := Datahistoryjobs(queryMods...)
	queries.SetFrom(query.Query, `"datahistoryjob"`)

	// Default to selecting every job column unless the caller narrowed it.
	if len(queries.GetSelect(query.Query)) == 0 {
		queries.SetSelect(query.Query, []string{`"datahistoryjob".*`})
	}

	return query
}
// JobDatahistoryjobresults retrieves all the datahistoryjobresult's Datahistoryjobresults with an executor via job_id column.
func (o *Datahistoryjob) JobDatahistoryjobresults(mods ...qm.QueryMod) datahistoryjobresultQuery {
	queryMods := make([]qm.QueryMod, 0, len(mods)+1)
	queryMods = append(queryMods, mods...)
	queryMods = append(queryMods, qm.Where(`"datahistoryjobresult"."job_id"=?`, o.ID))

	query := Datahistoryjobresults(queryMods...)
	queries.SetFrom(query.Query, `"datahistoryjobresult"`)

	// Default to selecting every result column unless the caller narrowed it.
	if len(queries.GetSelect(query.Query)) == 0 {
		queries.SetSelect(query.Query, []string{`"datahistoryjobresult".*`})
	}

	return query
}
// LoadExchangeName allows an eager lookup of values, cached into the
// loaded structs of the objects. This is for an N-1 relationship.
func (datahistoryjobL) LoadExchangeName(ctx context.Context, e boil.ContextExecutor, singular bool, maybeDatahistoryjob interface{}, mods queries.Applicator) error {
	var slice []*Datahistoryjob
	var object *Datahistoryjob

	// maybeDatahistoryjob is either a single job or a pointer to a slice
	// of jobs, depending on the singular flag.
	if singular {
		object = maybeDatahistoryjob.(*Datahistoryjob)
	} else {
		slice = *maybeDatahistoryjob.(*[]*Datahistoryjob)
	}

	// Collect the distinct foreign-key values to query, initializing each
	// job's R struct along the way.
	args := make([]interface{}, 0, 1)
	if singular {
		if object.R == nil {
			object.R = &datahistoryjobR{}
		}
		args = append(args, object.ExchangeNameID)
	} else {
	Outer:
		for _, obj := range slice {
			if obj.R == nil {
				obj.R = &datahistoryjobR{}
			}
			// Skip ids already queued so each exchange is fetched once.
			for _, a := range args {
				if a == obj.ExchangeNameID {
					continue Outer
				}
			}
			args = append(args, obj.ExchangeNameID)
		}
	}

	if len(args) == 0 {
		return nil
	}

	query := NewQuery(qm.From(`exchange`), qm.WhereIn(`exchange.id in ?`, args...))
	if mods != nil {
		mods.Apply(query)
	}

	results, err := query.QueryContext(ctx, e)
	if err != nil {
		return errors.Wrap(err, "failed to eager load Exchange")
	}

	var resultSlice []*Exchange
	if err = queries.Bind(results, &resultSlice); err != nil {
		return errors.Wrap(err, "failed to bind eager loaded slice Exchange")
	}

	if err = results.Close(); err != nil {
		return errors.Wrap(err, "failed to close results of eager load for exchange")
	}
	if err = results.Err(); err != nil {
		return errors.Wrap(err, "error occurred during iteration of eager loaded relations for exchange")
	}

	// NOTE(review): this gates on the datahistoryjob after-select hook slice
	// but runs the hooks of the loaded *Exchange rows; the to-many loader
	// below gates on candleAfterSelectHooks instead. This mirrors the
	// generator template — confirm against the sqlboiler fork before changing.
	if len(datahistoryjobAfterSelectHooks) != 0 {
		for _, obj := range resultSlice {
			if err := obj.doAfterSelectHooks(ctx, e); err != nil {
				return err
			}
		}
	}

	if len(resultSlice) == 0 {
		return nil
	}

	// Wire both directions of the relationship into the R caches.
	if singular {
		foreign := resultSlice[0]
		object.R.ExchangeName = foreign
		if foreign.R == nil {
			foreign.R = &exchangeR{}
		}
		foreign.R.ExchangeNameDatahistoryjobs = append(foreign.R.ExchangeNameDatahistoryjobs, object)
		return nil
	}

	for _, local := range slice {
		for _, foreign := range resultSlice {
			if local.ExchangeNameID == foreign.ID {
				local.R.ExchangeName = foreign
				if foreign.R == nil {
					foreign.R = &exchangeR{}
				}
				foreign.R.ExchangeNameDatahistoryjobs = append(foreign.R.ExchangeNameDatahistoryjobs, local)
				break
			}
		}
	}

	return nil
}
// LoadSecondaryExchange allows an eager lookup of values, cached into the
// loaded structs of the objects. This is for an N-1 relationship.
// Unlike LoadExchangeName, the foreign key here is nullable, so NULL ids
// are skipped via queries.IsNil and compared via queries.Equal.
func (datahistoryjobL) LoadSecondaryExchange(ctx context.Context, e boil.ContextExecutor, singular bool, maybeDatahistoryjob interface{}, mods queries.Applicator) error {
	var slice []*Datahistoryjob
	var object *Datahistoryjob

	// maybeDatahistoryjob is either a single job or a pointer to a slice
	// of jobs, depending on the singular flag.
	if singular {
		object = maybeDatahistoryjob.(*Datahistoryjob)
	} else {
		slice = *maybeDatahistoryjob.(*[]*Datahistoryjob)
	}

	// Collect the distinct, non-NULL foreign-key values to query.
	args := make([]interface{}, 0, 1)
	if singular {
		if object.R == nil {
			object.R = &datahistoryjobR{}
		}
		if !queries.IsNil(object.SecondaryExchangeID) {
			args = append(args, object.SecondaryExchangeID)
		}
	} else {
	Outer:
		for _, obj := range slice {
			if obj.R == nil {
				obj.R = &datahistoryjobR{}
			}
			for _, a := range args {
				if queries.Equal(a, obj.SecondaryExchangeID) {
					continue Outer
				}
			}
			if !queries.IsNil(obj.SecondaryExchangeID) {
				args = append(args, obj.SecondaryExchangeID)
			}
		}
	}

	if len(args) == 0 {
		return nil
	}

	query := NewQuery(qm.From(`exchange`), qm.WhereIn(`exchange.id in ?`, args...))
	if mods != nil {
		mods.Apply(query)
	}

	results, err := query.QueryContext(ctx, e)
	if err != nil {
		return errors.Wrap(err, "failed to eager load Exchange")
	}

	var resultSlice []*Exchange
	if err = queries.Bind(results, &resultSlice); err != nil {
		return errors.Wrap(err, "failed to bind eager loaded slice Exchange")
	}

	if err = results.Close(); err != nil {
		return errors.Wrap(err, "failed to close results of eager load for exchange")
	}
	if err = results.Err(); err != nil {
		return errors.Wrap(err, "error occurred during iteration of eager loaded relations for exchange")
	}

	// NOTE(review): gates on the datahistoryjob hook slice while running the
	// loaded *Exchange rows' hooks — mirrors the generator template; confirm
	// against the sqlboiler fork before changing.
	if len(datahistoryjobAfterSelectHooks) != 0 {
		for _, obj := range resultSlice {
			if err := obj.doAfterSelectHooks(ctx, e); err != nil {
				return err
			}
		}
	}

	if len(resultSlice) == 0 {
		return nil
	}

	// Wire both directions of the relationship into the R caches.
	if singular {
		foreign := resultSlice[0]
		object.R.SecondaryExchange = foreign
		if foreign.R == nil {
			foreign.R = &exchangeR{}
		}
		foreign.R.SecondaryExchangeDatahistoryjobs = append(foreign.R.SecondaryExchangeDatahistoryjobs, object)
		return nil
	}

	for _, local := range slice {
		for _, foreign := range resultSlice {
			if queries.Equal(local.SecondaryExchangeID, foreign.ID) {
				local.R.SecondaryExchange = foreign
				if foreign.R == nil {
					foreign.R = &exchangeR{}
				}
				foreign.R.SecondaryExchangeDatahistoryjobs = append(foreign.R.SecondaryExchangeDatahistoryjobs, local)
				break
			}
		}
	}

	return nil
}
// LoadSourceJobCandles allows an eager lookup of values, cached into the
// loaded structs of the objects. This is for a 1-M or N-M relationship.
func (datahistoryjobL) LoadSourceJobCandles(ctx context.Context, e boil.ContextExecutor, singular bool, maybeDatahistoryjob interface{}, mods queries.Applicator) error {
	var slice []*Datahistoryjob
	var object *Datahistoryjob

	// maybeDatahistoryjob is either a single job or a pointer to a slice
	// of jobs, depending on the singular flag.
	if singular {
		object = maybeDatahistoryjob.(*Datahistoryjob)
	} else {
		slice = *maybeDatahistoryjob.(*[]*Datahistoryjob)
	}

	// Collect the distinct job ids whose candles should be fetched,
	// initializing each job's R struct along the way.
	args := make([]interface{}, 0, 1)
	if singular {
		if object.R == nil {
			object.R = &datahistoryjobR{}
		}
		args = append(args, object.ID)
	} else {
	Outer:
		for _, obj := range slice {
			if obj.R == nil {
				obj.R = &datahistoryjobR{}
			}
			for _, a := range args {
				if queries.Equal(a, obj.ID) {
					continue Outer
				}
			}
			args = append(args, obj.ID)
		}
	}

	if len(args) == 0 {
		return nil
	}

	query := NewQuery(qm.From(`candle`), qm.WhereIn(`candle.source_job_id in ?`, args...))
	if mods != nil {
		mods.Apply(query)
	}

	results, err := query.QueryContext(ctx, e)
	if err != nil {
		return errors.Wrap(err, "failed to eager load candle")
	}

	var resultSlice []*Candle
	if err = queries.Bind(results, &resultSlice); err != nil {
		return errors.Wrap(err, "failed to bind eager loaded slice candle")
	}

	if err = results.Close(); err != nil {
		return errors.Wrap(err, "failed to close results in eager load on candle")
	}
	if err = results.Err(); err != nil {
		return errors.Wrap(err, "error occurred during iteration of eager loaded relations for candle")
	}

	// Run the loaded candles' after-select hooks when any are registered.
	if len(candleAfterSelectHooks) != 0 {
		for _, obj := range resultSlice {
			if err := obj.doAfterSelectHooks(ctx, e); err != nil {
				return err
			}
		}
	}

	// Wire both directions of the relationship into the R caches.
	if singular {
		object.R.SourceJobCandles = resultSlice
		for _, foreign := range resultSlice {
			if foreign.R == nil {
				foreign.R = &candleR{}
			}
			foreign.R.SourceJob = object
		}
		return nil
	}

	for _, foreign := range resultSlice {
		for _, local := range slice {
			if queries.Equal(local.ID, foreign.SourceJobID) {
				local.R.SourceJobCandles = append(local.R.SourceJobCandles, foreign)
				if foreign.R == nil {
					foreign.R = &candleR{}
				}
				foreign.R.SourceJob = local
				break
			}
		}
	}

	return nil
}
// LoadValidationJobCandles allows an eager lookup of values, cached into the
// loaded structs of the objects. This is for a 1-M or N-M relationship.
// Mirror of LoadSourceJobCandles for the candle.validation_job_id foreign
// key: loads related candles in one query and caches them on
// R.ValidationJobCandles, back-filling each candle's R.ValidationJob.
func (datahistoryjobL) LoadValidationJobCandles(ctx context.Context, e boil.ContextExecutor, singular bool, maybeDatahistoryjob interface{}, mods queries.Applicator) error {
	var slice []*Datahistoryjob
	var object *Datahistoryjob

	// One object or a slice, distinguished by the singular flag.
	if singular {
		object = maybeDatahistoryjob.(*Datahistoryjob)
	} else {
		slice = *maybeDatahistoryjob.(*[]*Datahistoryjob)
	}

	// Gather the distinct parent job IDs for the WHERE IN clause.
	args := make([]interface{}, 0, 1)
	if singular {
		if object.R == nil {
			object.R = &datahistoryjobR{}
		}
		args = append(args, object.ID)
	} else {
	Outer:
		for _, obj := range slice {
			if obj.R == nil {
				obj.R = &datahistoryjobR{}
			}

			// De-duplicate queued IDs.
			for _, a := range args {
				if queries.Equal(a, obj.ID) {
					continue Outer
				}
			}

			args = append(args, obj.ID)
		}
	}

	if len(args) == 0 {
		return nil
	}

	query := NewQuery(qm.From(`candle`), qm.WhereIn(`candle.validation_job_id in ?`, args...))
	if mods != nil {
		mods.Apply(query)
	}

	results, err := query.QueryContext(ctx, e)
	if err != nil {
		return errors.Wrap(err, "failed to eager load candle")
	}

	var resultSlice []*Candle
	if err = queries.Bind(results, &resultSlice); err != nil {
		return errors.Wrap(err, "failed to bind eager loaded slice candle")
	}

	if err = results.Close(); err != nil {
		return errors.Wrap(err, "failed to close results in eager load on candle")
	}
	if err = results.Err(); err != nil {
		return errors.Wrap(err, "error occurred during iteration of eager loaded relations for candle")
	}

	// Run any registered after-select hooks on the loaded candles.
	if len(candleAfterSelectHooks) != 0 {
		for _, obj := range resultSlice {
			if err := obj.doAfterSelectHooks(ctx, e); err != nil {
				return err
			}
		}
	}

	// Singular case: everything fetched belongs to the single object.
	if singular {
		object.R.ValidationJobCandles = resultSlice
		for _, foreign := range resultSlice {
			if foreign.R == nil {
				foreign.R = &candleR{}
			}
			foreign.R.ValidationJob = object
		}
		return nil
	}

	// Slice case: match candles back to jobs by the nullable FK.
	for _, foreign := range resultSlice {
		for _, local := range slice {
			if queries.Equal(local.ID, foreign.ValidationJobID) {
				local.R.ValidationJobCandles = append(local.R.ValidationJobCandles, foreign)
				if foreign.R == nil {
					foreign.R = &candleR{}
				}
				foreign.R.ValidationJob = local
				break
			}
		}
	}

	return nil
}
// LoadPrerequisiteJobDatahistoryjobs allows an eager lookup of values, cached into the
// loaded structs of the objects. This is for a 1-M or N-M relationship.
// N-M self-relationship through the datahistoryjobrelations join table:
// loads, for each job, the jobs that are its prerequisites
// (datahistoryjobrelations.prerequisite_job_id), caching them on
// R.PrerequisiteJobDatahistoryjobs and appending the inverse link on each
// loaded row's R.JobDatahistoryjobs.
func (datahistoryjobL) LoadPrerequisiteJobDatahistoryjobs(ctx context.Context, e boil.ContextExecutor, singular bool, maybeDatahistoryjob interface{}, mods queries.Applicator) error {
	var slice []*Datahistoryjob
	var object *Datahistoryjob

	// One object or a slice, distinguished by the singular flag.
	if singular {
		object = maybeDatahistoryjob.(*Datahistoryjob)
	} else {
		slice = *maybeDatahistoryjob.(*[]*Datahistoryjob)
	}

	// Gather the distinct job IDs used as the join-table filter.
	args := make([]interface{}, 0, 1)
	if singular {
		if object.R == nil {
			object.R = &datahistoryjobR{}
		}
		args = append(args, object.ID)
	} else {
	Outer:
		for _, obj := range slice {
			if obj.R == nil {
				obj.R = &datahistoryjobR{}
			}

			// De-duplicate queued IDs (plain == is fine: id is non-null).
			for _, a := range args {
				if a == obj.ID {
					continue Outer
				}
			}

			args = append(args, obj.ID)
		}
	}

	if len(args) == 0 {
		return nil
	}

	// Select the prerequisite rows plus the join column (a.job_id) so each
	// result can be matched back to the local job that requested it.
	query := NewQuery(
		qm.Select("\"datahistoryjob\".*, \"a\".\"job_id\""),
		qm.From("\"datahistoryjob\""),
		qm.InnerJoin("\"datahistoryjobrelations\" as \"a\" on \"datahistoryjob\".\"id\" = \"a\".\"prerequisite_job_id\""),
		qm.WhereIn("\"a\".\"job_id\" in ?", args...),
	)
	if mods != nil {
		mods.Apply(query)
	}

	results, err := query.QueryContext(ctx, e)
	if err != nil {
		return errors.Wrap(err, "failed to eager load datahistoryjob")
	}

	// Manual scan (rather than queries.Bind) because of the extra join
	// column; the scan order must match the table's column order exactly.
	var resultSlice []*Datahistoryjob

	var localJoinCols []string
	for results.Next() {
		one := new(Datahistoryjob)
		var localJoinCol string

		err = results.Scan(&one.ID, &one.Nickname, &one.ExchangeNameID, &one.Asset, &one.Base, &one.Quote, &one.StartTime, &one.EndTime, &one.DataType, &one.Interval, &one.RequestSize, &one.MaxRetries, &one.BatchCount, &one.Status, &one.Created, &one.ConversionInterval, &one.OverwriteData, &one.DecimalPlaceComparison, &one.SecondaryExchangeID, &one.IssueTolerancePercentage, &one.ReplaceOnIssue, &localJoinCol)
		if err != nil {
			return errors.Wrap(err, "failed to scan eager loaded results for datahistoryjob")
		}
		if err = results.Err(); err != nil {
			return errors.Wrap(err, "failed to plebian-bind eager loaded slice datahistoryjob")
		}

		resultSlice = append(resultSlice, one)
		localJoinCols = append(localJoinCols, localJoinCol)
	}

	if err = results.Close(); err != nil {
		return errors.Wrap(err, "failed to close results in eager load on datahistoryjob")
	}
	if err = results.Err(); err != nil {
		return errors.Wrap(err, "error occurred during iteration of eager loaded relations for datahistoryjob")
	}

	// Run any registered after-select hooks on the loaded jobs.
	if len(datahistoryjobAfterSelectHooks) != 0 {
		for _, obj := range resultSlice {
			if err := obj.doAfterSelectHooks(ctx, e); err != nil {
				return err
			}
		}
	}

	// Singular case: all loaded rows are prerequisites of the one object.
	if singular {
		object.R.PrerequisiteJobDatahistoryjobs = resultSlice
		for _, foreign := range resultSlice {
			if foreign.R == nil {
				foreign.R = &datahistoryjobR{}
			}
			foreign.R.JobDatahistoryjobs = append(foreign.R.JobDatahistoryjobs, object)
		}
		return nil
	}

	// Slice case: pair each result with its local job via the join column
	// captured at the same index.
	for i, foreign := range resultSlice {
		localJoinCol := localJoinCols[i]
		for _, local := range slice {
			if local.ID == localJoinCol {
				local.R.PrerequisiteJobDatahistoryjobs = append(local.R.PrerequisiteJobDatahistoryjobs, foreign)
				if foreign.R == nil {
					foreign.R = &datahistoryjobR{}
				}
				foreign.R.JobDatahistoryjobs = append(foreign.R.JobDatahistoryjobs, local)
				break
			}
		}
	}

	return nil
}
// LoadJobDatahistoryjobs allows an eager lookup of values, cached into the
// loaded structs of the objects. This is for a 1-M or N-M relationship.
// Inverse of LoadPrerequisiteJobDatahistoryjobs: loads, for each job, the
// jobs that list it as a prerequisite (datahistoryjobrelations.job_id),
// caching them on R.JobDatahistoryjobs and appending the inverse link on
// each loaded row's R.PrerequisiteJobDatahistoryjobs.
func (datahistoryjobL) LoadJobDatahistoryjobs(ctx context.Context, e boil.ContextExecutor, singular bool, maybeDatahistoryjob interface{}, mods queries.Applicator) error {
	var slice []*Datahistoryjob
	var object *Datahistoryjob

	// One object or a slice, distinguished by the singular flag.
	if singular {
		object = maybeDatahistoryjob.(*Datahistoryjob)
	} else {
		slice = *maybeDatahistoryjob.(*[]*Datahistoryjob)
	}

	// Gather the distinct job IDs used as the join-table filter.
	args := make([]interface{}, 0, 1)
	if singular {
		if object.R == nil {
			object.R = &datahistoryjobR{}
		}
		args = append(args, object.ID)
	} else {
	Outer:
		for _, obj := range slice {
			if obj.R == nil {
				obj.R = &datahistoryjobR{}
			}

			// De-duplicate queued IDs (plain == is fine: id is non-null).
			for _, a := range args {
				if a == obj.ID {
					continue Outer
				}
			}

			args = append(args, obj.ID)
		}
	}

	if len(args) == 0 {
		return nil
	}

	// Select dependent rows plus the join column (a.prerequisite_job_id) so
	// each result can be matched back to the local job it depends on.
	query := NewQuery(
		qm.Select("\"datahistoryjob\".*, \"a\".\"prerequisite_job_id\""),
		qm.From("\"datahistoryjob\""),
		qm.InnerJoin("\"datahistoryjobrelations\" as \"a\" on \"datahistoryjob\".\"id\" = \"a\".\"job_id\""),
		qm.WhereIn("\"a\".\"prerequisite_job_id\" in ?", args...),
	)
	if mods != nil {
		mods.Apply(query)
	}

	results, err := query.QueryContext(ctx, e)
	if err != nil {
		return errors.Wrap(err, "failed to eager load datahistoryjob")
	}

	// Manual scan because of the extra join column; the scan order must
	// match the table's column order exactly.
	var resultSlice []*Datahistoryjob

	var localJoinCols []string
	for results.Next() {
		one := new(Datahistoryjob)
		var localJoinCol string

		err = results.Scan(&one.ID, &one.Nickname, &one.ExchangeNameID, &one.Asset, &one.Base, &one.Quote, &one.StartTime, &one.EndTime, &one.DataType, &one.Interval, &one.RequestSize, &one.MaxRetries, &one.BatchCount, &one.Status, &one.Created, &one.ConversionInterval, &one.OverwriteData, &one.DecimalPlaceComparison, &one.SecondaryExchangeID, &one.IssueTolerancePercentage, &one.ReplaceOnIssue, &localJoinCol)
		if err != nil {
			return errors.Wrap(err, "failed to scan eager loaded results for datahistoryjob")
		}
		if err = results.Err(); err != nil {
			return errors.Wrap(err, "failed to plebian-bind eager loaded slice datahistoryjob")
		}

		resultSlice = append(resultSlice, one)
		localJoinCols = append(localJoinCols, localJoinCol)
	}

	if err = results.Close(); err != nil {
		return errors.Wrap(err, "failed to close results in eager load on datahistoryjob")
	}
	if err = results.Err(); err != nil {
		return errors.Wrap(err, "error occurred during iteration of eager loaded relations for datahistoryjob")
	}

	// Run any registered after-select hooks on the loaded jobs.
	if len(datahistoryjobAfterSelectHooks) != 0 {
		for _, obj := range resultSlice {
			if err := obj.doAfterSelectHooks(ctx, e); err != nil {
				return err
			}
		}
	}

	// Singular case: all loaded rows depend on the one object.
	if singular {
		object.R.JobDatahistoryjobs = resultSlice
		for _, foreign := range resultSlice {
			if foreign.R == nil {
				foreign.R = &datahistoryjobR{}
			}
			foreign.R.PrerequisiteJobDatahistoryjobs = append(foreign.R.PrerequisiteJobDatahistoryjobs, object)
		}
		return nil
	}

	// Slice case: pair each result with its local job via the join column
	// captured at the same index.
	for i, foreign := range resultSlice {
		localJoinCol := localJoinCols[i]
		for _, local := range slice {
			if local.ID == localJoinCol {
				local.R.JobDatahistoryjobs = append(local.R.JobDatahistoryjobs, foreign)
				if foreign.R == nil {
					foreign.R = &datahistoryjobR{}
				}
				foreign.R.PrerequisiteJobDatahistoryjobs = append(foreign.R.PrerequisiteJobDatahistoryjobs, local)
				break
			}
		}
	}

	return nil
}
// LoadJobDatahistoryjobresults allows an eager lookup of values, cached into the
// loaded structs of the objects. This is for a 1-M or N-M relationship.
// Loads all datahistoryjobresult rows whose job_id matches the given job(s)
// in a single query, caching them on R.JobDatahistoryjobresults and
// back-filling each result's R.Job pointer.
func (datahistoryjobL) LoadJobDatahistoryjobresults(ctx context.Context, e boil.ContextExecutor, singular bool, maybeDatahistoryjob interface{}, mods queries.Applicator) error {
	var slice []*Datahistoryjob
	var object *Datahistoryjob

	// One object or a slice, distinguished by the singular flag.
	if singular {
		object = maybeDatahistoryjob.(*Datahistoryjob)
	} else {
		slice = *maybeDatahistoryjob.(*[]*Datahistoryjob)
	}

	// Gather the distinct parent job IDs for the WHERE IN clause.
	args := make([]interface{}, 0, 1)
	if singular {
		if object.R == nil {
			object.R = &datahistoryjobR{}
		}
		args = append(args, object.ID)
	} else {
	Outer:
		for _, obj := range slice {
			if obj.R == nil {
				obj.R = &datahistoryjobR{}
			}

			// De-duplicate queued IDs.
			for _, a := range args {
				if a == obj.ID {
					continue Outer
				}
			}

			args = append(args, obj.ID)
		}
	}

	if len(args) == 0 {
		return nil
	}

	query := NewQuery(qm.From(`datahistoryjobresult`), qm.WhereIn(`datahistoryjobresult.job_id in ?`, args...))
	if mods != nil {
		mods.Apply(query)
	}

	results, err := query.QueryContext(ctx, e)
	if err != nil {
		return errors.Wrap(err, "failed to eager load datahistoryjobresult")
	}

	var resultSlice []*Datahistoryjobresult
	if err = queries.Bind(results, &resultSlice); err != nil {
		return errors.Wrap(err, "failed to bind eager loaded slice datahistoryjobresult")
	}

	if err = results.Close(); err != nil {
		return errors.Wrap(err, "failed to close results in eager load on datahistoryjobresult")
	}
	if err = results.Err(); err != nil {
		return errors.Wrap(err, "error occurred during iteration of eager loaded relations for datahistoryjobresult")
	}

	// Run any registered after-select hooks on the loaded results.
	if len(datahistoryjobresultAfterSelectHooks) != 0 {
		for _, obj := range resultSlice {
			if err := obj.doAfterSelectHooks(ctx, e); err != nil {
				return err
			}
		}
	}

	// Singular case: everything fetched belongs to the single object.
	if singular {
		object.R.JobDatahistoryjobresults = resultSlice
		for _, foreign := range resultSlice {
			if foreign.R == nil {
				foreign.R = &datahistoryjobresultR{}
			}
			foreign.R.Job = object
		}
		return nil
	}

	// Slice case: match results back to jobs by the non-null job_id FK.
	for _, foreign := range resultSlice {
		for _, local := range slice {
			if local.ID == foreign.JobID {
				local.R.JobDatahistoryjobresults = append(local.R.JobDatahistoryjobresults, foreign)
				if foreign.R == nil {
					foreign.R = &datahistoryjobresultR{}
				}
				foreign.R.Job = local
				break
			}
		}
	}

	return nil
}
// SetExchangeName of the datahistoryjob to the related item.
// Sets o.R.ExchangeName to related.
// Adds o to related.R.ExchangeNameDatahistoryjobs.
// When insert is true the related exchange is inserted first; the job row's
// exchange_name_id is then updated in the database and both in-memory
// relationship caches are kept in sync.
func (o *Datahistoryjob) SetExchangeName(ctx context.Context, exec boil.ContextExecutor, insert bool, related *Exchange) error {
	var err error
	if insert {
		if err = related.Insert(ctx, exec, boil.Infer()); err != nil {
			return errors.Wrap(err, "failed to insert into foreign table")
		}
	}

	// Point this job's FK column at the related exchange.
	updateQuery := fmt.Sprintf(
		"UPDATE \"datahistoryjob\" SET %s WHERE %s",
		strmangle.SetParamNames("\"", "\"", 1, []string{"exchange_name_id"}),
		strmangle.WhereClause("\"", "\"", 2, datahistoryjobPrimaryKeyColumns),
	)
	values := []interface{}{related.ID, o.ID}

	if boil.DebugMode {
		fmt.Fprintln(boil.DebugWriter, updateQuery)
		fmt.Fprintln(boil.DebugWriter, values)
	}
	if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil {
		return errors.Wrap(err, "failed to update local table")
	}

	// Mirror the database change on the in-memory struct and caches.
	o.ExchangeNameID = related.ID
	if o.R == nil {
		o.R = &datahistoryjobR{
			ExchangeName: related,
		}
	} else {
		o.R.ExchangeName = related
	}

	if related.R == nil {
		related.R = &exchangeR{
			ExchangeNameDatahistoryjobs: DatahistoryjobSlice{o},
		}
	} else {
		related.R.ExchangeNameDatahistoryjobs = append(related.R.ExchangeNameDatahistoryjobs, o)
	}

	return nil
}
// SetSecondaryExchange of the datahistoryjob to the related item.
// Sets o.R.SecondaryExchange to related.
// Adds o to related.R.SecondaryExchangeDatahistoryjobs.
// Same as SetExchangeName but for the nullable secondary_exchange_id column,
// hence the queries.Assign when mirroring the value in memory.
func (o *Datahistoryjob) SetSecondaryExchange(ctx context.Context, exec boil.ContextExecutor, insert bool, related *Exchange) error {
	var err error
	if insert {
		if err = related.Insert(ctx, exec, boil.Infer()); err != nil {
			return errors.Wrap(err, "failed to insert into foreign table")
		}
	}

	// Point this job's FK column at the related exchange.
	updateQuery := fmt.Sprintf(
		"UPDATE \"datahistoryjob\" SET %s WHERE %s",
		strmangle.SetParamNames("\"", "\"", 1, []string{"secondary_exchange_id"}),
		strmangle.WhereClause("\"", "\"", 2, datahistoryjobPrimaryKeyColumns),
	)
	values := []interface{}{related.ID, o.ID}

	if boil.DebugMode {
		fmt.Fprintln(boil.DebugWriter, updateQuery)
		fmt.Fprintln(boil.DebugWriter, values)
	}
	if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil {
		return errors.Wrap(err, "failed to update local table")
	}

	// Assign handles the nullable FK wrapper type.
	queries.Assign(&o.SecondaryExchangeID, related.ID)
	if o.R == nil {
		o.R = &datahistoryjobR{
			SecondaryExchange: related,
		}
	} else {
		o.R.SecondaryExchange = related
	}

	if related.R == nil {
		related.R = &exchangeR{
			SecondaryExchangeDatahistoryjobs: DatahistoryjobSlice{o},
		}
	} else {
		related.R.SecondaryExchangeDatahistoryjobs = append(related.R.SecondaryExchangeDatahistoryjobs, o)
	}

	return nil
}
// RemoveSecondaryExchange relationship.
// Sets o.R.SecondaryExchange to nil.
// Removes o from all passed in related items' relationships struct (Optional).
// Nulls the secondary_exchange_id column in the database, then prunes o from
// the related exchange's cached slice (swap-with-last, order not preserved).
func (o *Datahistoryjob) RemoveSecondaryExchange(ctx context.Context, exec boil.ContextExecutor, related *Exchange) error {
	var err error

	queries.SetScanner(&o.SecondaryExchangeID, nil)
	if _, err = o.Update(ctx, exec, boil.Whitelist("secondary_exchange_id")); err != nil {
		return errors.Wrap(err, "failed to update local table")
	}

	o.R.SecondaryExchange = nil
	if related == nil || related.R == nil {
		return nil
	}

	for i, ri := range related.R.SecondaryExchangeDatahistoryjobs {
		// o.SecondaryExchangeID was just nulled above, so entries whose FK
		// still differs from null are kept; matching (now-null) entries are
		// removed. This is the standard sqlboiler-generated comparison.
		if queries.Equal(o.SecondaryExchangeID, ri.SecondaryExchangeID) {
			continue
		}

		ln := len(related.R.SecondaryExchangeDatahistoryjobs)
		if ln > 1 && i < ln-1 {
			related.R.SecondaryExchangeDatahistoryjobs[i] = related.R.SecondaryExchangeDatahistoryjobs[ln-1]
		}
		related.R.SecondaryExchangeDatahistoryjobs = related.R.SecondaryExchangeDatahistoryjobs[:ln-1]
		break
	}
	return nil
}
// AddSourceJobCandles adds the given related objects to the existing relationships
// of the datahistoryjob, optionally inserting them as new records.
// Appends related to o.R.SourceJobCandles.
// Sets related.R.SourceJob appropriately.
// With insert true each candle is inserted with source_job_id pre-assigned;
// otherwise existing candle rows are UPDATEd to point at this job.
func (o *Datahistoryjob) AddSourceJobCandles(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*Candle) error {
	var err error
	for _, rel := range related {
		if insert {
			// Assign handles the nullable FK wrapper type.
			queries.Assign(&rel.SourceJobID, o.ID)
			if err = rel.Insert(ctx, exec, boil.Infer()); err != nil {
				return errors.Wrap(err, "failed to insert into foreign table")
			}
		} else {
			updateQuery := fmt.Sprintf(
				"UPDATE \"candle\" SET %s WHERE %s",
				strmangle.SetParamNames("\"", "\"", 1, []string{"source_job_id"}),
				strmangle.WhereClause("\"", "\"", 2, candlePrimaryKeyColumns),
			)
			values := []interface{}{o.ID, rel.ID}

			if boil.DebugMode {
				fmt.Fprintln(boil.DebugWriter, updateQuery)
				fmt.Fprintln(boil.DebugWriter, values)
			}
			if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil {
				return errors.Wrap(err, "failed to update foreign table")
			}

			// Mirror the database change on the in-memory struct.
			queries.Assign(&rel.SourceJobID, o.ID)
		}
	}

	// Cache the forward relationship on this job.
	if o.R == nil {
		o.R = &datahistoryjobR{
			SourceJobCandles: related,
		}
	} else {
		o.R.SourceJobCandles = append(o.R.SourceJobCandles, related...)
	}

	// Cache the inverse relationship on each candle.
	for _, rel := range related {
		if rel.R == nil {
			rel.R = &candleR{
				SourceJob: o,
			}
		} else {
			rel.R.SourceJob = o
		}
	}
	return nil
}
// SetSourceJobCandles removes all previously related items of the
// datahistoryjob replacing them completely with the passed
// in related items, optionally inserting them as new records.
// Sets o.R.SourceJob's SourceJobCandles accordingly.
// Replaces o.R.SourceJobCandles with related.
// Sets related.R.SourceJob's SourceJobCandles accordingly.
func (o *Datahistoryjob) SetSourceJobCandles(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*Candle) error {
	// Detach every currently-related candle in the database first.
	query := "update \"candle\" set \"source_job_id\" = null where \"source_job_id\" = $1"
	values := []interface{}{o.ID}
	if boil.DebugMode {
		fmt.Fprintln(boil.DebugWriter, query)
		fmt.Fprintln(boil.DebugWriter, values)
	}
	_, err := exec.ExecContext(ctx, query, values...)
	if err != nil {
		return errors.Wrap(err, "failed to remove relationships before set")
	}

	// Clear the in-memory caches on both sides of the old relationship.
	if o.R != nil {
		for _, rel := range o.R.SourceJobCandles {
			queries.SetScanner(&rel.SourceJobID, nil)
			if rel.R == nil {
				continue
			}

			rel.R.SourceJob = nil
		}
		o.R.SourceJobCandles = nil
	}

	// Delegate attaching of the replacement set.
	return o.AddSourceJobCandles(ctx, exec, insert, related...)
}
// RemoveSourceJobCandles relationships from objects passed in.
// Removes related items from R.SourceJobCandles (uses pointer comparison, removal does not keep order)
// Sets related.R.SourceJob.
// Each passed candle has its source_job_id nulled in the database and is
// pruned from this job's cached slice via swap-with-last removal.
func (o *Datahistoryjob) RemoveSourceJobCandles(ctx context.Context, exec boil.ContextExecutor, related ...*Candle) error {
	var err error
	for _, rel := range related {
		queries.SetScanner(&rel.SourceJobID, nil)
		if rel.R != nil {
			rel.R.SourceJob = nil
		}
		if _, err = rel.Update(ctx, exec, boil.Whitelist("source_job_id")); err != nil {
			return err
		}
	}
	if o.R == nil {
		return nil
	}

	// Prune each removed candle from the cached slice (pointer identity).
	for _, rel := range related {
		for i, ri := range o.R.SourceJobCandles {
			if rel != ri {
				continue
			}

			ln := len(o.R.SourceJobCandles)
			if ln > 1 && i < ln-1 {
				o.R.SourceJobCandles[i] = o.R.SourceJobCandles[ln-1]
			}
			o.R.SourceJobCandles = o.R.SourceJobCandles[:ln-1]
			break
		}
	}

	return nil
}
// AddValidationJobCandles adds the given related objects to the existing relationships
// of the datahistoryjob, optionally inserting them as new records.
// Appends related to o.R.ValidationJobCandles.
// Sets related.R.ValidationJob appropriately.
// Mirror of AddSourceJobCandles for the candle.validation_job_id column.
func (o *Datahistoryjob) AddValidationJobCandles(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*Candle) error {
	var err error
	for _, rel := range related {
		if insert {
			// Assign handles the nullable FK wrapper type.
			queries.Assign(&rel.ValidationJobID, o.ID)
			if err = rel.Insert(ctx, exec, boil.Infer()); err != nil {
				return errors.Wrap(err, "failed to insert into foreign table")
			}
		} else {
			updateQuery := fmt.Sprintf(
				"UPDATE \"candle\" SET %s WHERE %s",
				strmangle.SetParamNames("\"", "\"", 1, []string{"validation_job_id"}),
				strmangle.WhereClause("\"", "\"", 2, candlePrimaryKeyColumns),
			)
			values := []interface{}{o.ID, rel.ID}

			if boil.DebugMode {
				fmt.Fprintln(boil.DebugWriter, updateQuery)
				fmt.Fprintln(boil.DebugWriter, values)
			}
			if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil {
				return errors.Wrap(err, "failed to update foreign table")
			}

			// Mirror the database change on the in-memory struct.
			queries.Assign(&rel.ValidationJobID, o.ID)
		}
	}

	// Cache the forward relationship on this job.
	if o.R == nil {
		o.R = &datahistoryjobR{
			ValidationJobCandles: related,
		}
	} else {
		o.R.ValidationJobCandles = append(o.R.ValidationJobCandles, related...)
	}

	// Cache the inverse relationship on each candle.
	for _, rel := range related {
		if rel.R == nil {
			rel.R = &candleR{
				ValidationJob: o,
			}
		} else {
			rel.R.ValidationJob = o
		}
	}
	return nil
}
// SetValidationJobCandles removes all previously related items of the
// datahistoryjob replacing them completely with the passed
// in related items, optionally inserting them as new records.
// Sets o.R.ValidationJob's ValidationJobCandles accordingly.
// Replaces o.R.ValidationJobCandles with related.
// Sets related.R.ValidationJob's ValidationJobCandles accordingly.
func (o *Datahistoryjob) SetValidationJobCandles(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*Candle) error {
	// Detach every currently-related candle in the database first.
	query := "update \"candle\" set \"validation_job_id\" = null where \"validation_job_id\" = $1"
	values := []interface{}{o.ID}
	if boil.DebugMode {
		fmt.Fprintln(boil.DebugWriter, query)
		fmt.Fprintln(boil.DebugWriter, values)
	}
	_, err := exec.ExecContext(ctx, query, values...)
	if err != nil {
		return errors.Wrap(err, "failed to remove relationships before set")
	}

	// Clear the in-memory caches on both sides of the old relationship.
	if o.R != nil {
		for _, rel := range o.R.ValidationJobCandles {
			queries.SetScanner(&rel.ValidationJobID, nil)
			if rel.R == nil {
				continue
			}

			rel.R.ValidationJob = nil
		}
		o.R.ValidationJobCandles = nil
	}

	// Delegate attaching of the replacement set.
	return o.AddValidationJobCandles(ctx, exec, insert, related...)
}
// RemoveValidationJobCandles relationships from objects passed in.
// Removes related items from R.ValidationJobCandles (uses pointer comparison, removal does not keep order)
// Sets related.R.ValidationJob.
// Mirror of RemoveSourceJobCandles for the candle.validation_job_id column.
func (o *Datahistoryjob) RemoveValidationJobCandles(ctx context.Context, exec boil.ContextExecutor, related ...*Candle) error {
	var err error
	for _, rel := range related {
		queries.SetScanner(&rel.ValidationJobID, nil)
		if rel.R != nil {
			rel.R.ValidationJob = nil
		}
		if _, err = rel.Update(ctx, exec, boil.Whitelist("validation_job_id")); err != nil {
			return err
		}
	}
	if o.R == nil {
		return nil
	}

	// Prune each removed candle from the cached slice (pointer identity).
	for _, rel := range related {
		for i, ri := range o.R.ValidationJobCandles {
			if rel != ri {
				continue
			}

			ln := len(o.R.ValidationJobCandles)
			if ln > 1 && i < ln-1 {
				o.R.ValidationJobCandles[i] = o.R.ValidationJobCandles[ln-1]
			}
			o.R.ValidationJobCandles = o.R.ValidationJobCandles[:ln-1]
			break
		}
	}

	return nil
}
// AddPrerequisiteJobDatahistoryjobs adds the given related objects to the existing relationships
// of the datahistoryjob, optionally inserting them as new records.
// Appends related to o.R.PrerequisiteJobDatahistoryjobs.
// Sets related.R.JobDatahistoryjobs appropriately.
// Links are persisted via rows in the datahistoryjobrelations join table
// (job_id = o.ID, prerequisite_job_id = rel.ID).
func (o *Datahistoryjob) AddPrerequisiteJobDatahistoryjobs(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*Datahistoryjob) error {
	var err error
	// Optionally insert the prerequisite jobs themselves first.
	for _, rel := range related {
		if insert {
			if err = rel.Insert(ctx, exec, boil.Infer()); err != nil {
				return errors.Wrap(err, "failed to insert into foreign table")
			}
		}
	}

	// One join-table row per relationship.
	for _, rel := range related {
		query := "insert into \"datahistoryjobrelations\" (\"job_id\", \"prerequisite_job_id\") values ($1, $2)"
		values := []interface{}{o.ID, rel.ID}

		if boil.DebugMode {
			fmt.Fprintln(boil.DebugWriter, query)
			fmt.Fprintln(boil.DebugWriter, values)
		}
		_, err = exec.ExecContext(ctx, query, values...)
		if err != nil {
			return errors.Wrap(err, "failed to insert into join table")
		}
	}

	// Cache the forward relationship on this job.
	if o.R == nil {
		o.R = &datahistoryjobR{
			PrerequisiteJobDatahistoryjobs: related,
		}
	} else {
		o.R.PrerequisiteJobDatahistoryjobs = append(o.R.PrerequisiteJobDatahistoryjobs, related...)
	}

	// Cache the inverse relationship on each prerequisite job.
	for _, rel := range related {
		if rel.R == nil {
			rel.R = &datahistoryjobR{
				JobDatahistoryjobs: DatahistoryjobSlice{o},
			}
		} else {
			rel.R.JobDatahistoryjobs = append(rel.R.JobDatahistoryjobs, o)
		}
	}
	return nil
}
// SetPrerequisiteJobDatahistoryjobs removes all previously related items of the
// datahistoryjob replacing them completely with the passed
// in related items, optionally inserting them as new records.
// Sets o.R.JobDatahistoryjobs's PrerequisiteJobDatahistoryjobs accordingly.
// Replaces o.R.PrerequisiteJobDatahistoryjobs with related.
// Sets related.R.JobDatahistoryjobs's PrerequisiteJobDatahistoryjobs accordingly.
func (o *Datahistoryjob) SetPrerequisiteJobDatahistoryjobs(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*Datahistoryjob) error {
	// Drop all existing join-table rows for this job.
	query := "delete from \"datahistoryjobrelations\" where \"job_id\" = $1"
	values := []interface{}{o.ID}
	if boil.DebugMode {
		fmt.Fprintln(boil.DebugWriter, query)
		fmt.Fprintln(boil.DebugWriter, values)
	}
	_, err := exec.ExecContext(ctx, query, values...)
	if err != nil {
		return errors.Wrap(err, "failed to remove relationships before set")
	}

	// Scrub the inverse caches on the related jobs, then this job's cache.
	removePrerequisiteJobDatahistoryjobsFromJobDatahistoryjobsSlice(o, related)
	if o.R != nil {
		o.R.PrerequisiteJobDatahistoryjobs = nil
	}

	// Delegate attaching of the replacement set.
	return o.AddPrerequisiteJobDatahistoryjobs(ctx, exec, insert, related...)
}
// RemovePrerequisiteJobDatahistoryjobs relationships from objects passed in.
// Removes related items from R.PrerequisiteJobDatahistoryjobs (uses pointer comparison, removal does not keep order)
// Sets related.R.JobDatahistoryjobs.
// Deletes the matching join-table rows in a single statement, then prunes
// the in-memory caches on both sides.
func (o *Datahistoryjob) RemovePrerequisiteJobDatahistoryjobs(ctx context.Context, exec boil.ContextExecutor, related ...*Datahistoryjob) error {
	var err error
	query := fmt.Sprintf(
		"delete from \"datahistoryjobrelations\" where \"job_id\" = $1 and \"prerequisite_job_id\" in (%s)",
		strmangle.Placeholders(dialect.UseIndexPlaceholders, len(related), 2, 1),
	)
	values := []interface{}{o.ID}
	for _, rel := range related {
		values = append(values, rel.ID)
	}

	if boil.DebugMode {
		fmt.Fprintln(boil.DebugWriter, query)
		fmt.Fprintln(boil.DebugWriter, values)
	}
	_, err = exec.ExecContext(ctx, query, values...)
	if err != nil {
		return errors.Wrap(err, "failed to remove relationships before set")
	}

	// Scrub the inverse caches on the related jobs.
	removePrerequisiteJobDatahistoryjobsFromJobDatahistoryjobsSlice(o, related)
	if o.R == nil {
		return nil
	}

	// Prune each removed job from this job's cache (pointer identity,
	// swap-with-last removal).
	for _, rel := range related {
		for i, ri := range o.R.PrerequisiteJobDatahistoryjobs {
			if rel != ri {
				continue
			}

			ln := len(o.R.PrerequisiteJobDatahistoryjobs)
			if ln > 1 && i < ln-1 {
				o.R.PrerequisiteJobDatahistoryjobs[i] = o.R.PrerequisiteJobDatahistoryjobs[ln-1]
			}
			o.R.PrerequisiteJobDatahistoryjobs = o.R.PrerequisiteJobDatahistoryjobs[:ln-1]
			break
		}
	}

	return nil
}
// removePrerequisiteJobDatahistoryjobsFromJobDatahistoryjobsSlice deletes o
// from the cached R.JobDatahistoryjobs slice of every related job, matching
// by ID. Removal is swap-with-last, so cached ordering is not preserved; at
// most one entry is removed per related job.
func removePrerequisiteJobDatahistoryjobsFromJobDatahistoryjobsSlice(o *Datahistoryjob, related []*Datahistoryjob) {
	for _, item := range related {
		if item.R == nil {
			continue
		}

		cached := item.R.JobDatahistoryjobs
		for idx := range cached {
			if cached[idx].ID != o.ID {
				continue
			}

			last := len(cached) - 1
			if idx < last {
				cached[idx] = cached[last]
			}
			item.R.JobDatahistoryjobs = cached[:last]
			break
		}
	}
}
// AddJobDatahistoryjobs adds the given related objects to the existing relationships
// of the datahistoryjob, optionally inserting them as new records.
// Appends related to o.R.JobDatahistoryjobs.
// Sets related.R.PrerequisiteJobDatahistoryjobs appropriately.
// Inverse of AddPrerequisiteJobDatahistoryjobs: here o is the prerequisite
// (prerequisite_job_id = o.ID, job_id = rel.ID in the join table).
func (o *Datahistoryjob) AddJobDatahistoryjobs(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*Datahistoryjob) error {
	var err error
	// Optionally insert the dependent jobs themselves first.
	for _, rel := range related {
		if insert {
			if err = rel.Insert(ctx, exec, boil.Infer()); err != nil {
				return errors.Wrap(err, "failed to insert into foreign table")
			}
		}
	}

	// One join-table row per relationship.
	for _, rel := range related {
		query := "insert into \"datahistoryjobrelations\" (\"prerequisite_job_id\", \"job_id\") values ($1, $2)"
		values := []interface{}{o.ID, rel.ID}

		if boil.DebugMode {
			fmt.Fprintln(boil.DebugWriter, query)
			fmt.Fprintln(boil.DebugWriter, values)
		}
		_, err = exec.ExecContext(ctx, query, values...)
		if err != nil {
			return errors.Wrap(err, "failed to insert into join table")
		}
	}

	// Cache the forward relationship on this job.
	if o.R == nil {
		o.R = &datahistoryjobR{
			JobDatahistoryjobs: related,
		}
	} else {
		o.R.JobDatahistoryjobs = append(o.R.JobDatahistoryjobs, related...)
	}

	// Cache the inverse relationship on each dependent job.
	for _, rel := range related {
		if rel.R == nil {
			rel.R = &datahistoryjobR{
				PrerequisiteJobDatahistoryjobs: DatahistoryjobSlice{o},
			}
		} else {
			rel.R.PrerequisiteJobDatahistoryjobs = append(rel.R.PrerequisiteJobDatahistoryjobs, o)
		}
	}
	return nil
}
// SetJobDatahistoryjobs removes all previously related items of the
// datahistoryjob replacing them completely with the passed
// in related items, optionally inserting them as new records.
// Sets o.R.PrerequisiteJobDatahistoryjobs's JobDatahistoryjobs accordingly.
// Replaces o.R.JobDatahistoryjobs with related.
// Sets related.R.PrerequisiteJobDatahistoryjobs's JobDatahistoryjobs accordingly.
func (o *Datahistoryjob) SetJobDatahistoryjobs(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*Datahistoryjob) error {
	// Drop all existing join-table rows where this job is the prerequisite.
	query := "delete from \"datahistoryjobrelations\" where \"prerequisite_job_id\" = $1"
	values := []interface{}{o.ID}
	if boil.DebugMode {
		fmt.Fprintln(boil.DebugWriter, query)
		fmt.Fprintln(boil.DebugWriter, values)
	}
	_, err := exec.ExecContext(ctx, query, values...)
	if err != nil {
		return errors.Wrap(err, "failed to remove relationships before set")
	}

	// Scrub the inverse caches on the related jobs, then this job's cache.
	removeJobDatahistoryjobsFromPrerequisiteJobDatahistoryjobsSlice(o, related)
	if o.R != nil {
		o.R.JobDatahistoryjobs = nil
	}

	// Delegate attaching of the replacement set.
	return o.AddJobDatahistoryjobs(ctx, exec, insert, related...)
}
// RemoveJobDatahistoryjobs relationships from objects passed in.
// Removes related items from R.JobDatahistoryjobs (uses pointer comparison, removal does not keep order)
// Sets related.R.PrerequisiteJobDatahistoryjobs.
// Deletes the matching join-table rows in a single statement, then prunes
// the in-memory caches on both sides.
func (o *Datahistoryjob) RemoveJobDatahistoryjobs(ctx context.Context, exec boil.ContextExecutor, related ...*Datahistoryjob) error {
	var err error
	query := fmt.Sprintf(
		"delete from \"datahistoryjobrelations\" where \"prerequisite_job_id\" = $1 and \"job_id\" in (%s)",
		strmangle.Placeholders(dialect.UseIndexPlaceholders, len(related), 2, 1),
	)
	values := []interface{}{o.ID}
	for _, rel := range related {
		values = append(values, rel.ID)
	}

	if boil.DebugMode {
		fmt.Fprintln(boil.DebugWriter, query)
		fmt.Fprintln(boil.DebugWriter, values)
	}
	_, err = exec.ExecContext(ctx, query, values...)
	if err != nil {
		return errors.Wrap(err, "failed to remove relationships before set")
	}

	// Scrub the inverse caches on the related jobs.
	removeJobDatahistoryjobsFromPrerequisiteJobDatahistoryjobsSlice(o, related)
	if o.R == nil {
		return nil
	}

	// Prune each removed job from this job's cache (pointer identity,
	// swap-with-last removal).
	for _, rel := range related {
		for i, ri := range o.R.JobDatahistoryjobs {
			if rel != ri {
				continue
			}

			ln := len(o.R.JobDatahistoryjobs)
			if ln > 1 && i < ln-1 {
				o.R.JobDatahistoryjobs[i] = o.R.JobDatahistoryjobs[ln-1]
			}
			o.R.JobDatahistoryjobs = o.R.JobDatahistoryjobs[:ln-1]
			break
		}
	}

	return nil
}
// removeJobDatahistoryjobsFromPrerequisiteJobDatahistoryjobsSlice deletes o
// from the cached R.PrerequisiteJobDatahistoryjobs slice of every related
// job, matching by ID. Removal is swap-with-last, so cached ordering is not
// preserved; at most one entry is removed per related job.
func removeJobDatahistoryjobsFromPrerequisiteJobDatahistoryjobsSlice(o *Datahistoryjob, related []*Datahistoryjob) {
	for _, item := range related {
		if item.R == nil {
			continue
		}

		cached := item.R.PrerequisiteJobDatahistoryjobs
		for idx := range cached {
			if cached[idx].ID != o.ID {
				continue
			}

			last := len(cached) - 1
			if idx < last {
				cached[idx] = cached[last]
			}
			item.R.PrerequisiteJobDatahistoryjobs = cached[:last]
			break
		}
	}
}
// AddJobDatahistoryjobresults adds the given related objects to the existing relationships
// of the datahistoryjob, optionally inserting them as new records.
// Appends related to o.R.JobDatahistoryjobresults.
// Sets related.R.Job appropriately.
// With insert true each result row is inserted with job_id pre-assigned;
// otherwise existing rows are UPDATEd to point at this job. job_id here is
// non-null, so plain assignment (not queries.Assign) is used.
func (o *Datahistoryjob) AddJobDatahistoryjobresults(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*Datahistoryjobresult) error {
	var err error
	for _, rel := range related {
		if insert {
			rel.JobID = o.ID
			if err = rel.Insert(ctx, exec, boil.Infer()); err != nil {
				return errors.Wrap(err, "failed to insert into foreign table")
			}
		} else {
			updateQuery := fmt.Sprintf(
				"UPDATE \"datahistoryjobresult\" SET %s WHERE %s",
				strmangle.SetParamNames("\"", "\"", 1, []string{"job_id"}),
				strmangle.WhereClause("\"", "\"", 2, datahistoryjobresultPrimaryKeyColumns),
			)
			values := []interface{}{o.ID, rel.ID}

			if boil.DebugMode {
				fmt.Fprintln(boil.DebugWriter, updateQuery)
				fmt.Fprintln(boil.DebugWriter, values)
			}
			if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil {
				return errors.Wrap(err, "failed to update foreign table")
			}

			// Mirror the database change on the in-memory struct.
			rel.JobID = o.ID
		}
	}

	// Cache the forward relationship on this job.
	if o.R == nil {
		o.R = &datahistoryjobR{
			JobDatahistoryjobresults: related,
		}
	} else {
		o.R.JobDatahistoryjobresults = append(o.R.JobDatahistoryjobresults, related...)
	}

	// Cache the inverse relationship on each result.
	for _, rel := range related {
		if rel.R == nil {
			rel.R = &datahistoryjobresultR{
				Job: o,
			}
		} else {
			rel.R.Job = o
		}
	}
	return nil
}
// Datahistoryjobs retrieves all the records using an executor.
func Datahistoryjobs(mods ...qm.QueryMod) datahistoryjobQuery {
	// Append the FROM clause after any caller-supplied modifiers.
	return datahistoryjobQuery{NewQuery(append(mods, qm.From("\"datahistoryjob\""))...)}
}
// FindDatahistoryjob retrieves a single record by ID with an executor.
// If selectCols is empty Find will return all columns.
func FindDatahistoryjob(ctx context.Context, exec boil.ContextExecutor, iD string, selectCols ...string) (*Datahistoryjob, error) {
	obj := &Datahistoryjob{}

	// Default to all columns; otherwise quote and join the requested subset.
	selection := "*"
	if len(selectCols) > 0 {
		selection = strings.Join(strmangle.IdentQuoteSlice(dialect.LQ, dialect.RQ, selectCols), ",")
	}

	query := fmt.Sprintf("select %s from \"datahistoryjob\" where \"id\"=$1", selection)

	if err := queries.Raw(query, iD).Bind(ctx, exec, obj); err != nil {
		// Surface a bare sql.ErrNoRows so callers can compare against it directly.
		if errors.Cause(err) == sql.ErrNoRows {
			return nil, sql.ErrNoRows
		}
		return nil, errors.Wrap(err, "postgres: unable to select from datahistoryjob")
	}

	return obj, nil
}
// Insert a single record using an executor.
// See boil.Columns.InsertColumnSet documentation to understand column list inference for inserts.
func (o *Datahistoryjob) Insert(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) error {
	if o == nil {
		return errors.New("postgres: no datahistoryjob provided for insertion")
	}

	var err error
	if err := o.doBeforeInsertHooks(ctx, exec); err != nil {
		return err
	}

	// Defaulted columns carrying non-zero values must be inserted explicitly
	// (and dropped from RETURNING) so user-set values aren't overwritten.
	nzDefaults := queries.NonZeroDefaultSet(datahistoryjobColumnsWithDefault, o)

	// Query text and struct-field mappings are cached per column set.
	key := makeCacheKey(columns, nzDefaults)
	datahistoryjobInsertCacheMut.RLock()
	cache, cached := datahistoryjobInsertCache[key]
	datahistoryjobInsertCacheMut.RUnlock()

	if !cached {
		// wl: columns to write; returnColumns: columns to read back via RETURNING.
		wl, returnColumns := columns.InsertColumnSet(
			datahistoryjobAllColumns,
			datahistoryjobColumnsWithDefault,
			datahistoryjobColumnsWithoutDefault,
			nzDefaults,
		)

		cache.valueMapping, err = queries.BindMapping(datahistoryjobType, datahistoryjobMapping, wl)
		if err != nil {
			return err
		}
		cache.retMapping, err = queries.BindMapping(datahistoryjobType, datahistoryjobMapping, returnColumns)
		if err != nil {
			return err
		}
		if len(wl) != 0 {
			cache.query = fmt.Sprintf("INSERT INTO \"datahistoryjob\" (\"%s\") %%sVALUES (%s)%%s", strings.Join(wl, "\",\""), strmangle.Placeholders(dialect.UseIndexPlaceholders, len(wl), 1, 1))
		} else {
			// No writable columns: every column takes its database default.
			cache.query = "INSERT INTO \"datahistoryjob\" %sDEFAULT VALUES%s"
		}

		var queryOutput, queryReturning string

		if len(cache.retMapping) != 0 {
			queryReturning = fmt.Sprintf(" RETURNING \"%s\"", strings.Join(returnColumns, "\",\""))
		}

		cache.query = fmt.Sprintf(cache.query, queryOutput, queryReturning)
	}

	// Pull the insert values out of the struct via the cached field mapping.
	value := reflect.Indirect(reflect.ValueOf(o))
	vals := queries.ValuesFromMapping(value, cache.valueMapping)

	if boil.DebugMode {
		fmt.Fprintln(boil.DebugWriter, cache.query)
		fmt.Fprintln(boil.DebugWriter, vals)
	}

	if len(cache.retMapping) != 0 {
		// RETURNING present: scan generated values straight back into o.
		err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(queries.PtrsFromMapping(value, cache.retMapping)...)
	} else {
		_, err = exec.ExecContext(ctx, cache.query, vals...)
	}

	if err != nil {
		return errors.Wrap(err, "postgres: unable to insert into datahistoryjob")
	}

	// Populate the cache only after a successful insert.
	if !cached {
		datahistoryjobInsertCacheMut.Lock()
		datahistoryjobInsertCache[key] = cache
		datahistoryjobInsertCacheMut.Unlock()
	}

	return o.doAfterInsertHooks(ctx, exec)
}
// Update uses an executor to update the Datahistoryjob.
// See boil.Columns.UpdateColumnSet documentation to understand column list inference for updates.
// Update does not automatically update the record in case of default values. Use .Reload() to refresh the records.
func (o *Datahistoryjob) Update(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) (int64, error) {
	var err error
	if err = o.doBeforeUpdateHooks(ctx, exec); err != nil {
		return 0, err
	}

	// Query text and field mappings are cached per column set.
	key := makeCacheKey(columns, nil)
	datahistoryjobUpdateCacheMut.RLock()
	cache, cached := datahistoryjobUpdateCache[key]
	datahistoryjobUpdateCacheMut.RUnlock()

	if !cached {
		// Whitelist of updatable columns (primary keys excluded).
		wl := columns.UpdateColumnSet(
			datahistoryjobAllColumns,
			datahistoryjobPrimaryKeyColumns,
		)

		if len(wl) == 0 {
			return 0, errors.New("postgres: unable to update datahistoryjob, could not build whitelist")
		}

		// SET placeholders come first; WHERE pkey placeholders continue the numbering.
		cache.query = fmt.Sprintf("UPDATE \"datahistoryjob\" SET %s WHERE %s",
			strmangle.SetParamNames("\"", "\"", 1, wl),
			strmangle.WhereClause("\"", "\"", len(wl)+1, datahistoryjobPrimaryKeyColumns),
		)
		cache.valueMapping, err = queries.BindMapping(datahistoryjobType, datahistoryjobMapping, append(wl, datahistoryjobPrimaryKeyColumns...))
		if err != nil {
			return 0, err
		}
	}

	// Values in the same order as the placeholders: SET values, then pkey.
	values := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), cache.valueMapping)

	if boil.DebugMode {
		fmt.Fprintln(boil.DebugWriter, cache.query)
		fmt.Fprintln(boil.DebugWriter, values)
	}

	var result sql.Result
	result, err = exec.ExecContext(ctx, cache.query, values...)
	if err != nil {
		return 0, errors.Wrap(err, "postgres: unable to update datahistoryjob row")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "postgres: failed to get rows affected by update for datahistoryjob")
	}

	if !cached {
		datahistoryjobUpdateCacheMut.Lock()
		datahistoryjobUpdateCache[key] = cache
		datahistoryjobUpdateCacheMut.Unlock()
	}

	return rowsAff, o.doAfterUpdateHooks(ctx, exec)
}
// UpdateAll updates all rows with the specified column values.
func (q datahistoryjobQuery) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) {
	// Convert the query into an UPDATE with the supplied SET values.
	queries.SetUpdate(q.Query, cols)

	res, err := q.Query.ExecContext(ctx, exec)
	if err != nil {
		return 0, errors.Wrap(err, "postgres: unable to update all for datahistoryjob")
	}

	affected, err := res.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "postgres: unable to retrieve rows affected for datahistoryjob")
	}

	return affected, nil
}
// UpdateAll updates all rows with the specified column values, using an executor.
func (o DatahistoryjobSlice) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) {
	ln := int64(len(o))
	if ln == 0 {
		return 0, nil
	}

	if len(cols) == 0 {
		return 0, errors.New("postgres: update all requires at least one column argument")
	}

	// Flatten the column map into parallel name/value slices. Map iteration
	// order is random, but names and args stay index-aligned.
	colNames := make([]string, len(cols))
	args := make([]interface{}, len(cols))

	i := 0
	for name, value := range cols {
		colNames[i] = name
		args[i] = value
		i++
	}

	// Append all of the primary key values for each column
	for _, obj := range o {
		pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), datahistoryjobPrimaryKeyMapping)
		args = append(args, pkeyArgs...)
	}

	// Single statement: SET placeholders first, then a repeated WHERE clause
	// matching each slice element's primary key (placeholders continue numbering).
	sql := fmt.Sprintf("UPDATE \"datahistoryjob\" SET %s WHERE %s",
		strmangle.SetParamNames("\"", "\"", 1, colNames),
		strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), len(colNames)+1, datahistoryjobPrimaryKeyColumns, len(o)))

	if boil.DebugMode {
		fmt.Fprintln(boil.DebugWriter, sql)
		fmt.Fprintln(boil.DebugWriter, args...)
	}
	result, err := exec.ExecContext(ctx, sql, args...)
	if err != nil {
		return 0, errors.Wrap(err, "postgres: unable to update all in datahistoryjob slice")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "postgres: unable to retrieve rows affected all in update all datahistoryjob")
	}
	return rowsAff, nil
}
// Upsert attempts an insert using an executor, and does an update or ignore on conflict.
// See boil.Columns documentation for how to properly use updateColumns and insertColumns.
func (o *Datahistoryjob) Upsert(ctx context.Context, exec boil.ContextExecutor, updateOnConflict bool, conflictColumns []string, updateColumns, insertColumns boil.Columns) error {
	if o == nil {
		return errors.New("postgres: no datahistoryjob provided for upsert")
	}
	if err := o.doBeforeUpsertHooks(ctx, exec); err != nil {
		return err
	}

	// Defaulted columns holding non-zero values must be inserted explicitly.
	nzDefaults := queries.NonZeroDefaultSet(datahistoryjobColumnsWithDefault, o)

	// Build cache key in-line uglily - mysql vs psql problems
	// The key encodes every input that changes the generated SQL:
	// conflict behavior, conflict columns, both column sets, and nzDefaults.
	buf := strmangle.GetBuffer()
	if updateOnConflict {
		buf.WriteByte('t')
	} else {
		buf.WriteByte('f')
	}
	buf.WriteByte('.')
	for _, c := range conflictColumns {
		buf.WriteString(c)
	}
	buf.WriteByte('.')
	buf.WriteString(strconv.Itoa(updateColumns.Kind))
	for _, c := range updateColumns.Cols {
		buf.WriteString(c)
	}
	buf.WriteByte('.')
	buf.WriteString(strconv.Itoa(insertColumns.Kind))
	for _, c := range insertColumns.Cols {
		buf.WriteString(c)
	}
	buf.WriteByte('.')
	for _, c := range nzDefaults {
		buf.WriteString(c)
	}
	key := buf.String()
	strmangle.PutBuffer(buf)

	datahistoryjobUpsertCacheMut.RLock()
	cache, cached := datahistoryjobUpsertCache[key]
	datahistoryjobUpsertCacheMut.RUnlock()

	var err error

	if !cached {
		// insert: columns to write; ret: columns to read back via RETURNING.
		insert, ret := insertColumns.InsertColumnSet(
			datahistoryjobAllColumns,
			datahistoryjobColumnsWithDefault,
			datahistoryjobColumnsWithoutDefault,
			nzDefaults,
		)
		// Columns to assign in the ON CONFLICT DO UPDATE branch.
		update := updateColumns.UpdateColumnSet(
			datahistoryjobAllColumns,
			datahistoryjobPrimaryKeyColumns,
		)

		if updateOnConflict && len(update) == 0 {
			return errors.New("postgres: unable to upsert datahistoryjob, could not build update column list")
		}

		// Default the conflict target to the primary key columns.
		conflict := conflictColumns
		if len(conflict) == 0 {
			conflict = make([]string, len(datahistoryjobPrimaryKeyColumns))
			copy(conflict, datahistoryjobPrimaryKeyColumns)
		}
		cache.query = buildUpsertQueryPostgres(dialect, "\"datahistoryjob\"", updateOnConflict, ret, update, conflict, insert)

		cache.valueMapping, err = queries.BindMapping(datahistoryjobType, datahistoryjobMapping, insert)
		if err != nil {
			return err
		}
		if len(ret) != 0 {
			cache.retMapping, err = queries.BindMapping(datahistoryjobType, datahistoryjobMapping, ret)
			if err != nil {
				return err
			}
		}
	}

	// Extract insert values (and RETURNING destinations) via the cached mappings.
	value := reflect.Indirect(reflect.ValueOf(o))
	vals := queries.ValuesFromMapping(value, cache.valueMapping)
	var returns []interface{}
	if len(cache.retMapping) != 0 {
		returns = queries.PtrsFromMapping(value, cache.retMapping)
	}

	if boil.DebugMode {
		fmt.Fprintln(boil.DebugWriter, cache.query)
		fmt.Fprintln(boil.DebugWriter, vals)
	}

	if len(cache.retMapping) != 0 {
		err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(returns...)
		if err == sql.ErrNoRows {
			err = nil // Postgres doesn't return anything when there's no update
		}
	} else {
		_, err = exec.ExecContext(ctx, cache.query, vals...)
	}
	if err != nil {
		return errors.Wrap(err, "postgres: unable to upsert datahistoryjob")
	}

	// Populate the cache only after a successful upsert.
	if !cached {
		datahistoryjobUpsertCacheMut.Lock()
		datahistoryjobUpsertCache[key] = cache
		datahistoryjobUpsertCacheMut.Unlock()
	}

	return o.doAfterUpsertHooks(ctx, exec)
}
// Delete deletes a single Datahistoryjob record with an executor.
// Delete will match against the primary key column to find the record to delete.
func (o *Datahistoryjob) Delete(ctx context.Context, exec boil.ContextExecutor) (int64, error) {
	if o == nil {
		return 0, errors.New("postgres: no Datahistoryjob provided for delete")
	}

	if err := o.doBeforeDeleteHooks(ctx, exec); err != nil {
		return 0, err
	}

	query := "DELETE FROM \"datahistoryjob\" WHERE \"id\"=$1"
	pkeyVals := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), datahistoryjobPrimaryKeyMapping)

	if boil.DebugMode {
		fmt.Fprintln(boil.DebugWriter, query)
		fmt.Fprintln(boil.DebugWriter, pkeyVals...)
	}

	res, err := exec.ExecContext(ctx, query, pkeyVals...)
	if err != nil {
		return 0, errors.Wrap(err, "postgres: unable to delete from datahistoryjob")
	}

	affected, err := res.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "postgres: failed to get rows affected by delete for datahistoryjob")
	}

	if err := o.doAfterDeleteHooks(ctx, exec); err != nil {
		return 0, err
	}

	return affected, nil
}
// DeleteAll deletes all matching rows.
func (q datahistoryjobQuery) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) {
	if q.Query == nil {
		return 0, errors.New("postgres: no datahistoryjobQuery provided for delete all")
	}

	// Convert the query into a DELETE statement.
	queries.SetDelete(q.Query)

	res, err := q.Query.ExecContext(ctx, exec)
	if err != nil {
		return 0, errors.Wrap(err, "postgres: unable to delete all from datahistoryjob")
	}

	affected, err := res.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "postgres: failed to get rows affected by deleteall for datahistoryjob")
	}

	return affected, nil
}
// DeleteAll deletes all rows in the slice, using an executor.
func (o DatahistoryjobSlice) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) {
	if len(o) == 0 {
		return 0, nil
	}

	// Run before-delete hooks for every object prior to issuing the statement.
	if len(datahistoryjobBeforeDeleteHooks) != 0 {
		for _, obj := range o {
			if err := obj.doBeforeDeleteHooks(ctx, exec); err != nil {
				return 0, err
			}
		}
	}

	// Collect primary key values for every object, in slice order.
	var args []interface{}
	for _, obj := range o {
		pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), datahistoryjobPrimaryKeyMapping)
		args = append(args, pkeyArgs...)
	}

	// Single DELETE matching each object's primary key via a repeated WHERE clause.
	sql := "DELETE FROM \"datahistoryjob\" WHERE " +
		strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 1, datahistoryjobPrimaryKeyColumns, len(o))

	if boil.DebugMode {
		fmt.Fprintln(boil.DebugWriter, sql)
		fmt.Fprintln(boil.DebugWriter, args)
	}
	result, err := exec.ExecContext(ctx, sql, args...)
	if err != nil {
		return 0, errors.Wrap(err, "postgres: unable to delete all from datahistoryjob slice")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "postgres: failed to get rows affected by deleteall for datahistoryjob")
	}

	// After-delete hooks run once the statement has succeeded.
	if len(datahistoryjobAfterDeleteHooks) != 0 {
		for _, obj := range o {
			if err := obj.doAfterDeleteHooks(ctx, exec); err != nil {
				return 0, err
			}
		}
	}

	return rowsAff, nil
}
// Reload refetches the object from the database
// using the primary keys with an executor.
func (o *Datahistoryjob) Reload(ctx context.Context, exec boil.ContextExecutor) error {
	fresh, err := FindDatahistoryjob(ctx, exec, o.ID)
	if err != nil {
		return err
	}

	// Overwrite the receiver with the freshly loaded row.
	*o = *fresh
	return nil
}
// ReloadAll refetches every row with matching primary key column values
// and overwrites the original object slice with the newly updated slice.
func (o *DatahistoryjobSlice) ReloadAll(ctx context.Context, exec boil.ContextExecutor) error {
	if o == nil || len(*o) == 0 {
		return nil
	}

	slice := DatahistoryjobSlice{}
	// Collect primary key values for every object, in slice order.
	var args []interface{}
	for _, obj := range *o {
		pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), datahistoryjobPrimaryKeyMapping)
		args = append(args, pkeyArgs...)
	}

	// Single SELECT matching each object's primary key via a repeated WHERE clause.
	// NOTE: result order follows the database, not the original slice order.
	sql := "SELECT \"datahistoryjob\".* FROM \"datahistoryjob\" WHERE " +
		strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 1, datahistoryjobPrimaryKeyColumns, len(*o))

	q := queries.Raw(sql, args...)

	err := q.Bind(ctx, exec, &slice)
	if err != nil {
		return errors.Wrap(err, "postgres: unable to reload all in DatahistoryjobSlice")
	}

	*o = slice

	return nil
}
// DatahistoryjobExists checks if the Datahistoryjob row exists.
func DatahistoryjobExists(ctx context.Context, exec boil.ContextExecutor, iD string) (bool, error) {
	query := "select exists(select 1 from \"datahistoryjob\" where \"id\"=$1 limit 1)"

	if boil.DebugMode {
		fmt.Fprintln(boil.DebugWriter, query)
		fmt.Fprintln(boil.DebugWriter, iD)
	}

	var exists bool
	if err := exec.QueryRowContext(ctx, query, iD).Scan(&exists); err != nil {
		return false, errors.Wrap(err, "postgres: unable to check if datahistoryjob exists")
	}

	return exists, nil
}