commit 33bc3cc53fc15505aa461a63ba80f13c03ed6899 Author: Paul Scheunemann Date: Fri May 4 10:30:44 2018 +0200 Initial commit diff --git a/db.gotmpl b/db.gotmpl new file mode 100644 index 0000000..3178ec3 --- /dev/null +++ b/db.gotmpl @@ -0,0 +1,280 @@ +// Code generated by gnorm, DO NOT EDIT! + +package {{.Params.RootPkg}} + +import ( + "database/sql" + "database/sql/driver" + "encoding/json" + "errors" + "strconv" + "strings" +) + +// DB is the common interface for database operations. +// +// This should work with database/sql.DB and database/sql.Tx. +type DB interface { + Exec(string, ...interface{}) (sql.Result, error) + Query(string, ...interface{}) (*sql.Rows, error) + QueryRow(string, ...interface{}) *sql.Row +} + +// Bytes is an wrapper for []byte for storing bytes in postgres +type Bytes []byte + +// Value implements the driver Valuer interface. +func (b Bytes) Value() (driver.Value, error) { + return []byte(b), nil +} + +// Scan implements the Scanner interface. +func (b *Bytes) Scan(value interface{}) error { + bytes, ok := value.([]byte) + if !ok { + return errors.New("Type assertion .([]byte) failed") + } + + *b = Bytes(bytes) + return nil +} + +// NullBytes is an wrapper for []byte for storing bytes in postgres +type NullBytes struct { + Bytes []byte + Valid bool +} + +// Value implements the driver Valuer interface. +func (nb NullBytes) Value() (driver.Value, error) { + if !nb.Valid { + return nil, nil + } + return nb.Bytes, nil +} + +// Scan implements the Scanner interface. 
+func (nb *NullBytes) Scan(value interface{}) error { + if value == nil { + nb.Bytes, nb.Valid = []byte(""), false + return nil + } + var ok bool + nb.Bytes, ok = value.([]byte) + if !ok { + return errors.New("Type assertion .([]byte) failed") + } + + nb.Valid = true + return nil +} + +// Jsonb is a wrapper for map[string]interface{} for storing json into postgres +type Jsonb map[string]interface{} + +// Value marshals the json into the database +func (j Jsonb) Value() (driver.Value, error) { + return json.Marshal(j) +} + +// Scan Unmarshalls the bytes[] back into a Jsonb object +func (j *Jsonb) Scan(src interface{}) error { + source, ok := src.([]byte) + if !ok { + return errors.New("Type assertion .([]byte) failed") + } + + var i interface{} + err := json.Unmarshal(source, &i) + if err != nil { + return err + } + + if i == nil { + return nil + } + + *j, ok = i.(map[string]interface{}) + if !ok { + return errors.New("reading from DB into Jsonb, failed to convert to map[string]interface{}") + } + + return nil +} + +// UnOrdered is a convenience value to make it clear you're not sorting a query. +var UnOrdered = OrderBy{} + +// OrderByDesc returns a sort order descending by the given field. +func OrderByDesc(field string) OrderBy { + return OrderBy{ + Field: field, + Order: OrderDesc, + } +} + +// OrderByAsc returns a sort order ascending by the given field. +func OrderByAsc(field string) OrderBy { + return OrderBy{ + Field: field, + Order: OrderAsc, + } +} + +// OrderBy indicates how rows should be sorted. +type OrderBy struct { + Field string + Order SortOrder +} + +func (o OrderBy) String() string { + if o.Order == OrderNone { + return "" + } + return " ORDER BY " + o.Field + " " + o.Order.String() + " " +} + +// SortOrder defines how to order rows returned. +type SortOrder int + +// Defined sort orders for not sorted, descending and ascending. 
+const ( + OrderNone SortOrder = iota + OrderDesc + OrderAsc +) + +// String returns the sql string representation of this sort order. +func (s SortOrder) String() string { + switch s { + case OrderDesc: + return "DESC" + case OrderAsc: + return "ASC" + } + return "" +} + +// WhereClause has a String function should return a properly formatted where +// clause (not including the WHERE) for positional arguments starting at idx. +type WhereClause interface { + String(idx *int) string + Values() []interface{} +} + +// Comparison is used by WhereClauses to create valid sql. +type Comparison string + +// Comparison types. +const ( + CompEqual Comparison = " = " + CompGreater Comparison = " > " + CompLess Comparison = " < " + CompGTE Comparison = " >= " + CompLTE Comparison = " <= " + CompNE Comparison = " <> " +) + +type Where struct { + Field string + Comp Comparison + Value interface{} +} + +func (w Where) String(idx *int) string { + ret := w.Field + string(w.Comp) + "$" + strconv.Itoa(*idx) + (*idx)++ + return ret +} + +func (w Where) Values() []interface{} { + return []interface{}{w.Value} +} + +// NullClause is a clause that checks for a column being null or not. +type NullClause struct { + Field string + Null bool +} + +func (n NullClause) String(idx *int) string { + if n.Null { + return n.Field + " IS NULL " + } + return n.Field + " IS NOT NULL " +} + +func (n NullClause) Values() []interface{} { + return []interface{}{} +} + +// AndClause returns a WhereClause that serializes to the AND +// of all the given where clauses. 
+func AndClause(wheres ...WhereClause) WhereClause { + return andClause(wheres) +} + +type andClause []WhereClause + +func (a andClause) String(idx *int) string { + wheres := make([]string, len(a)) + for x := 0; x < len(a); x++ { + wheres[x] = a[x].String(idx) + } + return strings.Join(wheres, " AND ") +} + +func (a andClause) Values() []interface{} { + vals := make([]interface{}, 0, len(a)) + for x := 0; x < len(a); x++ { + vals = append(vals, a[x].Values()...) + } + return vals +} + +// OrClause returns a WhereClause that serializes to the OR +// of all the given where clauses. +func OrClause(wheres ...WhereClause) WhereClause { + return orClause(wheres) +} + +type orClause []WhereClause + +func (o orClause) String(idx *int) string { + wheres := make([]string, len(o)) + for x := 0; x < len(wheres); x++ { + wheres[x] = o[x].String(idx) + } + return strings.Join(wheres, " OR ") +} + +func (o orClause) Values() []interface{} { + vals := make([]interface{}, 0, len(o)) + for x := 0; x < len(o); x++ { + vals = append(vals, o[x].Values()...) + } + return vals +} + +// InClause takes a slice of values that it matches against. +type InClause struct { + Field string + Vals []interface{} +} + +func (in InClause) String(idx *int) string { + ret := in.Field + " IN (" + for x := range in.Vals { + if x != 0 { + ret += ", " + } + ret += "$" + strconv.Itoa(*idx) + (*idx)++ + } + ret += ")" + return ret +} + +func (in InClause) Values() []interface{} { + return in.Vals +} \ No newline at end of file diff --git a/doc.go b/doc.go new file mode 100644 index 0000000..99503f7 --- /dev/null +++ b/doc.go @@ -0,0 +1,21 @@ +// Package postgres contains autogenerated code for postgres databases. 
+// There are a couple of special paths in this directory: +// * /_templates contains the gnorm templates for generations +// * /gnorm.toml holds the gnorm configuration +// * /migrations contains the logic to migrate the database +// * /doc.go (this file) contains some instructions about this package +// +// All query and row definitions are autogenerated from the database, and +// converted into static code. This eliminates the need for an ORM, makes +// requests very fast and eliminates the possibilities of typos, syntax +// errors, SQL injections and therefore runtime problems with the database. +// +// If you need to update the database logic however, this can be a little +// tricky. As this approach takes the DB as the source of truth, start with +// writing the up- and down migration (migrations can be found in the +// assets/migrations/ dir). +// Apply the migrations and run `gnorm gen` in this package, this will +// update all database related code. If there are compile time errors, the +// changes were incompatible with the codebase and you need to adjust the +// code a little (usually this is just a matter of minutes). +package postgres diff --git a/enum.gotmpl b/enum.gotmpl new file mode 100644 index 0000000..48ff214 --- /dev/null +++ b/enum.gotmpl @@ -0,0 +1,146 @@ +// Code generated by gnorm, DO NOT EDIT! + +package enum + +import "{{.Params.RootImport}}" + +{{$rootPkg := .Params.RootPkg}} + +{{- $type := .Enum.Name -}} + +// {{ $type }} is the '{{ .Enum.DBName }}' enum type from schema '{{ .Enum.Schema.Name }}'. +type {{ $type }} uint16 + +const ( + // Unknown{{$type}} defines an invalid {{$type}}. + Unknown{{$type}} {{$type}} = 0 +{{- range .Enum.Values }} + {{ .Name }}{{ $type }} {{$type}} = {{ .Value }} +{{ end -}} +) + +// String returns the string value of the {{ $type }}. 
+func (e {{ $type }}) String() string { + switch e { +{{- range .Enum.Values }} + case {{ .Name }}{{ $type }}: + return "{{ .DBName }}" +{{- end }} + default: + return "Unknown{{$type}}" + } +} + +// MarshalText marshals {{ $type }} into text. +func (e {{ $type }}) MarshalText() ([]byte, error) { + return []byte(e.String()), nil +} + +// UnmarshalText unmarshals {{ $type }} from text. +func (e *{{ $type }}) UnmarshalText(text []byte) error { + val, err := Parse{{$type}}(string(text)) + if err != nil { + return err + } + *e = val + return nil +} + +// Parse{{$type}} converts s into a {{$type}} if it is a valid +// stringified value of {{$type}}. +func Parse{{$type}}(s string) ({{$type}}, error) { + switch s { +{{- range .Enum.Values }} + case "{{ .DBName }}": + return {{ .Name }}{{ $type }}, nil +{{- end }} + default: + return Unknown{{$type}}, errors.New("invalid {{ $type }}") + } +} + +// Value satisfies the sql/driver.Valuer interface for {{ $type }}. +func (e {{ $type }}) Value() (driver.Value, error) { + return e.String(), nil +} + +// Scan satisfies the database/sql.Scanner interface for {{ $type }}. +func (e *{{ $type }}) Scan(src interface{}) error { + buf, ok := src.([]byte) + if !ok { + return errors.New("invalid {{ $type }}") + } + + return e.UnmarshalText(buf) +} + +// {{$type}}Field is a component that returns a {{$rootPkg}}.WhereClause that contains a +// comparison based on its field and a strongly typed value. +type {{$type}}Field string + +// Equals returns a {{$rootPkg}}.WhereClause for this field. +func (f {{$type}}Field) Equals(v {{$type}}) {{$rootPkg}}.WhereClause { + return {{$rootPkg}}.Where{ + Field: string(f), + Comp: {{$rootPkg}}.CompEqual, + Value: v, + } +} + +// GreaterThan returns a {{$rootPkg}}.WhereClause for this field. 
+func (f {{$type}}Field) GreaterThan(v {{$type}}) {{$rootPkg}}.WhereClause { + return {{$rootPkg}}.Where{ + Field: string(f), + Comp: {{$rootPkg}}.CompGreater, + Value: v, + } +} + +// LessThan returns a {{$rootPkg}}.WhereClause for this field. +func (f {{$type}}Field) LessThan(v {{$type}}) {{$rootPkg}}.WhereClause { + return {{$rootPkg}}.Where{ + Field: string(f), + Comp: {{$rootPkg}}.CompLess, + Value: v, + } +} + +// GreaterOrEqual returns a {{$rootPkg}}.WhereClause for this field. +func (f {{$type}}Field) GreaterOrEqual(v {{$type}}) {{$rootPkg}}.WhereClause { + return {{$rootPkg}}.Where{ + Field: string(f), + Comp: {{$rootPkg}}.CompGTE, + Value: v, + } +} + +// LessOrEqual returns a {{$rootPkg}}.WhereClause for this field. +func (f {{$type}}Field) LessOrEqual(v {{$type}}) {{$rootPkg}}.WhereClause { + return {{$rootPkg}}.Where{ + Field: string(f), + Comp: {{$rootPkg}}.CompLTE, + Value: v, + } +} + +// NotEqual returns a {{$rootPkg}}.WhereClause for this field. +func (f {{$type}}Field) NotEqual(v {{$type}}) {{$rootPkg}}.WhereClause { + return {{$rootPkg}}.Where{ + Field: string(f), + Comp: {{$rootPkg}}.CompNE, + Value: v, + } +} + + +// In returns a {{$rootPkg}}.WhereClause for this field. +func (f {{$type}}Field) In(vals []{{$type}}) {{$rootPkg}}.WhereClause { + values := make([]interface{}, len(vals)) + for x := range vals { + values[x] = vals[x] + } + return {{$rootPkg}}.InClause{ + Field: string(f), + Vals: values, + } +} \ No newline at end of file diff --git a/fields.gotmpl b/fields.gotmpl new file mode 100644 index 0000000..4b4732c --- --- /dev/null +++ b/fields.gotmpl @@ -0,0 +1,103 @@ +// Code generated by gnorm, DO NOT EDIT! 
+ +package {{.Params.RootPkg}} + +import ( + "github.com/lib/pq" + uuid "github.com/satori/go.uuid" +) + +{{ range (makeSlice "Bytes" "Jsonb" "int" "string" "sql.NullString" "int64" "sql.NullInt64" "float64" "sql.NullFloat64" "bool" "sql.NullBool" "time.Time" "pq.NullTime" "uint32" "uuid.UUID" "uuid.NullUUID" "hstore.Hstore") }} +{{ $fieldName := title (replace . "." "" 1) }} +// {{$fieldName}}Field is a component that returns a WhereClause that contains a +// comparison based on its field and a strongly typed value. +type {{$fieldName}}Field string + +// Equals returns a WhereClause for this field. +func (f {{$fieldName}}Field) Equals(v {{.}}) WhereClause { + return Where{ + Field: string(f), + Comp: CompEqual, + Value: v, + } +} + +// GreaterThan returns a WhereClause for this field. +func (f {{$fieldName}}Field) GreaterThan(v {{.}}) WhereClause { + return Where{ + Field: string(f), + Comp: CompGreater, + Value: v, + } +} + +// LessThan returns a WhereClause for this field. +func (f {{$fieldName}}Field) LessThan(v {{.}}) WhereClause { + return Where{ + Field: string(f), + Comp: CompLess, + Value: v, + } +} + +// GreaterOrEqual returns a WhereClause for this field. +func (f {{$fieldName}}Field) GreaterOrEqual(v {{.}}) WhereClause { + return Where{ + Field: string(f), + Comp: CompGTE, + Value: v, + } +} + +// LessOrEqual returns a WhereClause for this field. +func (f {{$fieldName}}Field) LessOrEqual(v {{.}}) WhereClause { + return Where{ + Field: string(f), + Comp: CompLTE, + Value: v, + } +} + +// NotEqual returns a WhereClause for this field. +func (f {{$fieldName}}Field) NotEqual(v {{.}}) WhereClause { + return Where{ + Field: string(f), + Comp: CompNE, + Value: v, + } +} + +// In returns a WhereClause for this field. 
+func (f {{$fieldName}}Field) In(vals []{{.}}) WhereClause { + values := make([]interface{}, len(vals)) + for x := range vals { + values[x] = vals[x] + } + return InClause{ + Field: string(f), + Vals: values, + } +} + +{{end}} + +{{ range (makeSlice "Jsonb" "sql.NullString" "sql.NullInt64" "sql.NullFloat64" "sql.NullBool" "pq.NullTime" "uuid.NullUUID") }} +{{ $fieldName := title (replace . "." "" 1) }} +// IsNull returns a WhereClause that matches when this field is NULL. +func (f {{$fieldName}}Field) IsNull() WhereClause { + return NullClause{ + Field: string(f), + Null: true, + } +} + +// IsNotNull returns a WhereClause that matches when this field is not NULL. +func (f {{$fieldName}}Field) IsNotNull() WhereClause { + return NullClause{ + Field: string(f), + Null: false, + } +} + +{{end}} + diff --git a/gnorm.toml b/gnorm.toml new file mode 100644 index 0000000..6fb5a39 --- /dev/null +++ b/gnorm.toml @@ -0,0 +1,83 @@ +DBType = "postgres" + +# ConnStr is the connection string for the database where the schema is +# generated from +ConnStr = "dbname=postgres host=127.0.0.1 sslmode=disable user=postgres password=postgres" + +Schemas = ["public"] + +ExcludeTables = ["schema_migrations"] + +# Run this command after generation, to lint the generated files +PostRun = ["goimports", "-w", "$GNORMFILE"] + +# PascalCase should be used for our go database +NameConversion = "{{pascal .}}" + +# Generate in the current directory. If this is changed, the RootPkg +# below should match the folder name. +OutputDir = "." + +[Params] +# RootPkg is the package declaration for the output dir. It should match the +# directory name above. +RootPkg = "postgres" + +# RootImport is the import path for the output directory. 
+RootImport = "git.klink.asia/paul/kregistry/database/postgres" + +[SchemaPaths] +"fields.go" = "_templates/fields.gotmpl" +"db.go" = "_templates/db.gotmpl" + +[TablePaths] +"{{toLower .Table}}/{{toLower .Table}}.go" = "_templates/table.gotmpl" + +[EnumPaths] +"enum/{{toLower .Enum}}.go" = "_templates/enum.gotmpl" + +[TypeMap] +"timestamp with time zone" = "time.Time" +"timestamp without time zone" = "time.Time" +"timestamptz" = "time.Time" +"timestamp" = "time.Time" +"varchar" = "string" +"text" = "string" +"citext" = "string" +"boolean" = "bool" +"uuid" = "uuid.UUID" # from "github.com/satori/go.uuid" +"character varying" = "string" +"character" = "string" +"bigserial" = "int64" +"bigint" = "int64" +"integer" = "int" +"int4" = "int32" +"numeric" = "float64" +"real" = "float64" +"hstore" = "hstore.Hstore" # from "github.com/lib/pq/hstore" +"jsonb" = "postgres.Jsonb" # package name here has to be kept in sync with RootPkg. +"bytea" = "postgres.Bytes" + +# needs to be kept in sync with the enum template's package name +"rank" = "enum.Rank" + + +[NullableTypeMap] +"timestamp with time zone" = "pq.NullTime" +"timestamptz" = "pq.NullTime" +"timestamp" = "pq.NullTime" +"text" = "sql.NullString" +"citext" = "sql.NullString" +"varchar" = "sql.NullString" +"public.citext" = "sql.NullString" +"boolean" = "sql.NullBool" +"uuid" = "uuid.NullUUID" +"character varying" = "sql.NullString" +"character" = "sql.NullString" +"integer" = "sql.NullInt64" +"bigint" = "sql.NullInt64" +"numeric" = "sql.NullFloat64" +"real" = "sql.NullFloat64" +"hstore" = "hstore.Hstore" +"jsonb" = "postgres.Jsonb" # package name here has to be kept in sync with RootPkg. +"bytea" = "postgres.NullBytes" diff --git a/table.gotmpl b/table.gotmpl new file mode 100644 index 0000000..9d06f01 --- /dev/null +++ b/table.gotmpl @@ -0,0 +1,170 @@ +// Code generated by gnorm, DO NOT EDIT! 
+ +package {{toLower .Table.Name}} + +import ( + "{{.Params.RootImport}}" + "{{.Params.RootImport}}/enum" +) + +{{$rootPkg := .Params.RootPkg -}} +{{$table := .Table.DBName -}} +{{$schema := .Table.Schema.DBName -}} +// Row represents a row from '{{ $table }}'. +type Row struct { +{{- range .Table.Columns }} + {{ .Name }} {{ .Type }} // {{ .DBName }}{{if .IsPrimaryKey}} (PK){{end}} +{{- end }} +} + + +// Field values for every column in {{.Table.Name}}. +var ( +{{- range .Table.Columns }} + {{- if or (hasPrefix .Type (printf "%s." $rootPkg)) (hasPrefix .Type "enum.")}} + {{.Name}}Col {{ .Type }}Field = "{{ .DBName }}" + {{- else}} + {{.Name}}Col {{$rootPkg}}.{{ title (replace .Type "." "" 1) }}Field = "{{ .DBName }}" + {{- end}} +{{- end}} +) + +// Query retrieves rows from '{{ $table }}' as a slice of Row. +func Query(db {{$rootPkg}}.DB, where {{$rootPkg}}.WhereClause) ([]*Row, error) { + const origsqlstr = `SELECT + {{ join .Table.Columns.DBNames ", " }} + FROM {{$schema}}.{{ $table }} WHERE (` + + idx := 1 + sqlstr := origsqlstr + where.String(&idx) + ") " + + var vals []*Row + q, err := db.Query(sqlstr, where.Values()...) + if err != nil { + return nil, err + } + for q.Next() { + r := Row{} + err := q.Scan( {{ join (.Table.Columns.Names.Sprintf "&r.%s") ", " }} ) + if err != nil { + return nil, err + } + vals = append(vals, &r) + } + return vals, nil +} + +// One retrieve one row from '{{ $table }}'. +func One(db {{$rootPkg}}.DB, where {{$rootPkg}}.WhereClause) (*Row, error) { + const origsqlstr = `SELECT + {{ join .Table.Columns.DBNames ", " }} + FROM {{$schema}}.{{ $table }} WHERE (` + + idx := 1 + sqlstr := origsqlstr + where.String(&idx) + ") " + + row := db.QueryRow(sqlstr, where.Values()...) + + r := Row{} + err := row.Scan({{ join (.Table.Columns.Names.Sprintf "&r.%s") ", " }} ) + if err != nil { + return nil, err + } + + return &r, nil +} + +{{- define "values" -}} +{{$nums := numbers 1 . 
-}} +{{$indices := $nums.Sprintf "$%s" -}} +{{join $indices ", " -}} +{{end}} + +// Insert inserts the row into the database, returning the autogenerated +// primary keys. If the primary keys cannot be autogenerated, please use +// InsertExact instead. +func Insert(db {{$rootPkg}}.DB, r *Row) error { + const sqlstr = `INSERT INTO {{ $table }} ( + {{ join (.Table.Columns.DBNames.Except .Table.PrimaryKeys.DBNames) ", " }} + ) VALUES ( + {{template "values" (len (.Table.Columns.Names.Except .Table.PrimaryKeys.Names)) }} + ) RETURNING {{join (.Table.PrimaryKeys.DBNames.Sprintf "%s") ", "}}` + err := db.QueryRow(sqlstr, {{join ((.Table.Columns.Names.Except .Table.PrimaryKeys.Names).Sprintf "r.%s") ", "}}).Scan({{join (.Table.PrimaryKeys.Names.Sprintf "&r.%s") ", "}}) + return errors.Wrap(err, "insert {{.Table.Name}}") +} + +// InsertExact works like Insert, but lets you specify the primary keys. +// For most cases you should consider using Insert. +func InsertExact(db {{$rootPkg}}.DB, r *Row) error { + const sqlstr = `INSERT INTO {{ $table }} ( + {{ join .Table.Columns.DBNames ", " }} + ) VALUES ( + {{template "values" (len .Table.Columns) }} + )` + _, err := db.Exec(sqlstr, {{join (.Table.Columns.Names.Sprintf "r.%s") ", "}}) + return errors.Wrap(err, "insert {{.Table.Name}}") +} + +{{- if .Table.HasPrimaryKey }} +{{- if gt (len .Table.Columns) (len .Table.PrimaryKeys) }} +{{- $nonPKFields := join ((.Table.Columns.Names.Except .Table.PrimaryKeys.Names).Sprintf "r.%s") ", "}} +{{- $PKFields := join (.Table.PrimaryKeys.Names.Sprintf "r.%s") ", "}} +{{- $nonPKNames := .Table.Columns.DBNames.Except .Table.PrimaryKeys.DBNames}} +{{- $numNonPKs := sub (len .Table.Columns) (len .Table.PrimaryKeys)}} +{{- $updateCols := join (.Table.Columns.DBNames.Except .Table.PrimaryKeys.DBNames) ", " }} + + +// Update updates the Row in the database. 
+func Update(db {{$rootPkg}}.DB, r *Row) error { + const sqlstr = `UPDATE {{ $table }} SET ( + {{$updateCols}} + ) = ( + {{ template "values" $numNonPKs }} + ) WHERE + {{- $PKnums := numbers (inc $numNonPKs) (len .Table.Columns)}} + ({{join .Table.PrimaryKeys.DBNames ", "}}) = ({{ join ($PKnums.Sprintf "$%s") ", " }}) + ` + _, err := db.Exec(sqlstr, {{$nonPKFields}}, {{$PKFields}}) + return errors.Wrap(err, "update {{.Table.Name}}") +} + +// Upsert performs an upsert for {{ .Table.Name }}. +// +// NOTE: PostgreSQL 9.5+ only +func Upsert(db {{$rootPkg}}.DB, r *Row) error { + const sqlstr = `INSERT INTO {{ $table }} ( + {{$updateCols}}, {{join .Table.PrimaryKeys.DBNames ", "}} + ) VALUES ( + {{template "values" (len .Table.Columns) }} + ) ON CONFLICT ({{join .Table.PrimaryKeys.DBNames ", " }}) DO UPDATE SET ( + {{$updateCols}} + ) = ( + {{ template "values" $numNonPKs }} + )` + + _, err := db.Exec(sqlstr, {{$nonPKFields}}, {{$PKFields}}) + return errors.Wrap(err, "upsert {{.Table.Name}}") +} +{{ else }} +// Update statements omitted due to lack of primary key or lack of updateable fields +{{ end }} + + +// Delete deletes the Row from the database. +func Delete( + db {{$rootPkg}}.DB, +{{- range .Table.PrimaryKeys}} + {{camel .DBName}} {{.Type}}, +{{end -}} +) error { + const sqlstr = `DELETE FROM {{ $table }} WHERE ({{join .Table.PrimaryKeys.DBNames ", "}}) = ({{template "values" (len .Table.PrimaryKeys)}})` + + _, err := db.Exec( + sqlstr, + {{- range .Table.PrimaryKeys}} + {{camel .DBName}}, + {{end -}} + ) + return errors.Wrap(err, "delete {{.Table.Name}}") +} +{{- end }} \ No newline at end of file