I Guess I Need To Check In The Generated SQL For This To Work

This commit is contained in:
Annika Merris 2024-02-02 12:55:20 -05:00
parent 89595b6f71
commit 8f3d38a066
2 changed files with 575 additions and 1 deletions

2
.gitignore vendored
View file

@ -1,2 +1,2 @@
sql/**/*.go
# sql/**/*.go
docker/data

574
sql/powerItem/query.sql.go Normal file
View file

@ -0,0 +1,574 @@
// Code generated by pggen. DO NOT EDIT.
package powerItem
import (
"context"
"fmt"
"github.com/jackc/pgconn"
"github.com/jackc/pgtype"
"github.com/jackc/pgx/v4"
)
// Querier is a typesafe Go interface backed by SQL queries.
//
// Methods ending with Batch enqueue a query to run later in a pgx.Batch. After
// calling SendBatch on pgx.Conn, pgxpool.Pool, or pgx.Tx, use the Scan methods
// to parse the results.
type Querier interface {
	// GetAllItems finds all items.
	GetAllItems(ctx context.Context) ([]GetAllItemsRow, error)
	// GetAllItemsBatch enqueues a GetAllItems query into batch to be executed
	// later by the batch.
	GetAllItemsBatch(batch genericBatch)
	// GetAllItemsScan scans the result of an executed GetAllItemsBatch query.
	GetAllItemsScan(results pgx.BatchResults) ([]GetAllItemsRow, error)
	// GetAllByType finds all items of a specific type.
	GetAllByType(ctx context.Context, itemType int32) ([]GetAllByTypeRow, error)
	// GetAllByTypeBatch enqueues a GetAllByType query into batch to be executed
	// later by the batch.
	GetAllByTypeBatch(batch genericBatch, itemType int32)
	// GetAllByTypeScan scans the result of an executed GetAllByTypeBatch query.
	GetAllByTypeScan(results pgx.BatchResults) ([]GetAllByTypeRow, error)
	// FindByID finds an item by its ID.
	FindByID(ctx context.Context, id pgtype.UUID) (FindByIDRow, error)
	// FindByIDBatch enqueues a FindByID query into batch to be executed
	// later by the batch.
	FindByIDBatch(batch genericBatch, id pgtype.UUID)
	// FindByIDScan scans the result of an executed FindByIDBatch query.
	FindByIDScan(results pgx.BatchResults) (FindByIDRow, error)
	// AddNewItem inserts a new power item.
	AddNewItem(ctx context.Context, params AddNewItemParams) (AddNewItemRow, error)
	// AddNewItemBatch enqueues an AddNewItem query into batch to be executed
	// later by the batch.
	AddNewItemBatch(batch genericBatch, params AddNewItemParams)
	// AddNewItemScan scans the result of an executed AddNewItemBatch query.
	AddNewItemScan(results pgx.BatchResults) (AddNewItemRow, error)
	// AddNewItemWithID inserts a new power item with a caller-supplied ID.
	AddNewItemWithID(ctx context.Context, params AddNewItemWithIDParams) (AddNewItemWithIDRow, error)
	// AddNewItemWithIDBatch enqueues an AddNewItemWithID query into batch to be executed
	// later by the batch.
	AddNewItemWithIDBatch(batch genericBatch, params AddNewItemWithIDParams)
	// AddNewItemWithIDScan scans the result of an executed AddNewItemWithIDBatch query.
	AddNewItemWithIDScan(results pgx.BatchResults) (AddNewItemWithIDRow, error)
}
// DBQuerier is the concrete Querier implementation. It delegates every query
// to an underlying Postgres transport.
type DBQuerier struct {
	conn  genericConn   // underlying Postgres transport to use
	types *typeResolver // resolve types by name
}

// Compile-time check that DBQuerier satisfies the Querier interface.
var _ Querier = &DBQuerier{}
// genericConn is a connection to a Postgres database. This is usually backed by
// *pgx.Conn, pgx.Tx, or *pgxpool.Pool.
type genericConn interface {
	// Query executes sql with args. If there is an error the returned Rows will
	// be returned in an error state. So it is allowed to ignore the error
	// returned from Query and handle it in Rows.
	Query(ctx context.Context, sql string, args ...interface{}) (pgx.Rows, error)
	// QueryRow is a convenience wrapper over Query. Any error that occurs while
	// querying is deferred until calling Scan on the returned Row. That Row will
	// error with pgx.ErrNoRows if no rows are returned.
	QueryRow(ctx context.Context, sql string, args ...interface{}) pgx.Row
	// Exec executes sql. sql can be either a prepared statement name or an SQL
	// string. arguments should be referenced positionally from the sql string
	// as $1, $2, etc.
	Exec(ctx context.Context, sql string, arguments ...interface{}) (pgconn.CommandTag, error)
}
// genericBatch batches queries to send in a single network request to a
// Postgres server. This is usually backed by *pgx.Batch.
type genericBatch interface {
	// Queue queues a query to batch b. query can be an SQL query or the name of a
	// prepared statement. See Queue on *pgx.Batch.
	Queue(query string, arguments ...interface{})
}
// NewQuerier creates a DBQuerier that implements Querier. conn is typically
// *pgx.Conn, pgx.Tx, or *pgxpool.Pool.
func NewQuerier(conn genericConn) *DBQuerier {
	var cfg QuerierConfig // zero value: no caller-supplied data types
	return NewQuerierConfig(conn, cfg)
}
// QuerierConfig holds optional settings for constructing a DBQuerier.
type QuerierConfig struct {
	// DataTypes contains pgtype.Value to use for encoding and decoding instead
	// of pggen-generated pgtype.ValueTranscoder.
	//
	// If OIDs are available for an input parameter type and all of its
	// transitive dependencies, pggen will use the binary encoding format for
	// the input parameter.
	DataTypes []pgtype.DataType
}
// NewQuerierConfig creates a DBQuerier that implements Querier with the given
// config. conn is typically *pgx.Conn, pgx.Tx, or *pgxpool.Pool.
func NewQuerierConfig(conn genericConn, cfg QuerierConfig) *DBQuerier {
	resolver := newTypeResolver(cfg.DataTypes)
	return &DBQuerier{
		conn:  conn,
		types: resolver,
	}
}
// WithTx creates a new DBQuerier that uses the transaction to run all queries.
func (q *DBQuerier) WithTx(tx pgx.Tx) (*DBQuerier, error) {
	// Carry the type resolver over: the previous version returned
	// &DBQuerier{conn: tx} only, leaving types nil on the transactional
	// querier even though NewQuerierConfig always sets it.
	return &DBQuerier{conn: tx, types: q.types}, nil
}
// preparer is any Postgres connection transport that provides a way to prepare
// a statement, most commonly *pgx.Conn.
type preparer interface {
	Prepare(ctx context.Context, name, sql string) (sd *pgconn.StatementDescription, err error)
}
// PrepareAllQueries executes a PREPARE statement for all pggen generated SQL
// queries in querier files. Typical usage is as the AfterConnect callback
// for pgxpool.Config.
//
// pgx will use the prepared statement if available. Calling PrepareAllQueries
// is an optional optimization to avoid a network round-trip the first time pgx
// runs a query if pgx statement caching is enabled.
func PrepareAllQueries(ctx context.Context, p preparer) error {
	// Each statement is prepared under its own SQL text as the statement
	// name, matching how pgx looks up cached statements.
	stmts := []struct {
		name string // query name used in the error message
		sql  string
	}{
		{"GetAllItems", getAllItemsSQL},
		{"GetAllByType", getAllByTypeSQL},
		{"FindByID", findByIDSQL},
		{"AddNewItem", addNewItemSQL},
		{"AddNewItemWithID", addNewItemWithIDSQL},
	}
	for _, s := range stmts {
		if _, err := p.Prepare(ctx, s.sql, s.sql); err != nil {
			return fmt.Errorf("prepare query '%s': %w", s.name, err)
		}
	}
	return nil
}
// typeResolver looks up the pgtype.ValueTranscoder by Postgres type name.
type typeResolver struct {
	connInfo *pgtype.ConnInfo // types by Postgres type name
}
// newTypeResolver builds a typeResolver from caller-supplied data types.
func newTypeResolver(types []pgtype.DataType) *typeResolver {
	info := pgtype.NewConnInfo()
	for _, dt := range types {
		// When the OID is known, unwrap textPreferrer and register the
		// underlying transcoder directly (binary encoding is usable).
		if wrapped, isText := dt.Value.(textPreferrer); isText && dt.OID != unknownOID {
			dt.Value = wrapped.ValueTranscoder
		}
		info.RegisterDataType(dt)
	}
	return &typeResolver{connInfo: info}
}
// findValue looks up the OID and a fresh pgtype.ValueTranscoder for a
// Postgres type name. The bool reports whether the name was registered.
func (tr *typeResolver) findValue(name string) (uint32, pgtype.ValueTranscoder, bool) {
	dataType, known := tr.connInfo.DataTypeForName(name)
	if !known {
		return 0, nil, false
	}
	fresh := pgtype.NewValue(dataType.Value)
	return dataType.OID, fresh.(pgtype.ValueTranscoder), true
}
// setValue sets the value of a ValueTranscoder to a value that should always
// work and panics if it fails.
func (tr *typeResolver) setValue(vt pgtype.ValueTranscoder, val interface{}) pgtype.ValueTranscoder {
	err := vt.Set(val)
	if err == nil {
		return vt
	}
	panic(fmt.Sprintf("set ValueTranscoder %T to %+v: %s", vt, val, err))
}
// getAllItemsSQL is the query text behind Querier.GetAllItems. The column
// order here must match the Scan order in GetAllItems/GetAllItemsScan.
const getAllItemsSQL = `SELECT
id,
itemType,
iconURL,
itemName,
minItemPower,
maxItemPower,
rarity,
origin,
tooltip,
isEventItem
FROM
powerItem;`
// GetAllItemsRow is one result row of the GetAllItems query.
// Pointer fields presumably mark columns pggen inferred as nullable —
// TODO confirm against the powerItem table schema (GetAllByTypeRow uses
// value types for the same columns).
type GetAllItemsRow struct {
	ID           pgtype.UUID `json:"id"`
	Itemtype     *int32      `json:"itemtype"`
	Iconurl      *string     `json:"iconurl"`
	Itemname     *string     `json:"itemname"`
	Minitempower *int32      `json:"minitempower"`
	Maxitempower *int32      `json:"maxitempower"`
	Rarity       *int32      `json:"rarity"`
	Origin       *string     `json:"origin"`
	Tooltip      *string     `json:"tooltip"`
	Iseventitem  *bool       `json:"iseventitem"`
}
// GetAllItems implements Querier.GetAllItems.
// It returns an empty (non-nil) slice when the table has no rows.
func (q *DBQuerier) GetAllItems(ctx context.Context) ([]GetAllItemsRow, error) {
	// NOTE(review): a built-in string as a context key trips staticcheck
	// SA1029; pggen generates it this way, so it is left unchanged here.
	ctx = context.WithValue(ctx, "pggen_query_name", "GetAllItems")
	rows, err := q.conn.Query(ctx, getAllItemsSQL)
	if err != nil {
		return nil, fmt.Errorf("query GetAllItems: %w", err)
	}
	defer rows.Close()
	items := []GetAllItemsRow{}
	for rows.Next() {
		var item GetAllItemsRow
		// Scan order must match the column order in getAllItemsSQL.
		if err := rows.Scan(&item.ID, &item.Itemtype, &item.Iconurl, &item.Itemname, &item.Minitempower, &item.Maxitempower, &item.Rarity, &item.Origin, &item.Tooltip, &item.Iseventitem); err != nil {
			return nil, fmt.Errorf("scan GetAllItems row: %w", err)
		}
		items = append(items, item)
	}
	if err := rows.Err(); err != nil {
		return nil, fmt.Errorf("close GetAllItems rows: %w", err)
	}
	// err is necessarily nil here; the Query error was handled above.
	return items, err
}
// GetAllItemsBatch implements Querier.GetAllItemsBatch.
// It only enqueues the query; execute the batch with SendBatch and read the
// result back with GetAllItemsScan.
func (q *DBQuerier) GetAllItemsBatch(batch genericBatch) {
	batch.Queue(getAllItemsSQL)
}
// GetAllItemsScan implements Querier.GetAllItemsScan.
// Call it once per enqueued GetAllItemsBatch, in enqueue order.
func (q *DBQuerier) GetAllItemsScan(results pgx.BatchResults) ([]GetAllItemsRow, error) {
	rows, err := results.Query()
	if err != nil {
		return nil, fmt.Errorf("query GetAllItemsBatch: %w", err)
	}
	defer rows.Close()
	items := []GetAllItemsRow{}
	for rows.Next() {
		var item GetAllItemsRow
		// Scan order must match the column order in getAllItemsSQL.
		if err := rows.Scan(&item.ID, &item.Itemtype, &item.Iconurl, &item.Itemname, &item.Minitempower, &item.Maxitempower, &item.Rarity, &item.Origin, &item.Tooltip, &item.Iseventitem); err != nil {
			return nil, fmt.Errorf("scan GetAllItemsBatch row: %w", err)
		}
		items = append(items, item)
	}
	if err := rows.Err(); err != nil {
		return nil, fmt.Errorf("close GetAllItemsBatch rows: %w", err)
	}
	// err is necessarily nil here; the Query error was handled above.
	return items, err
}
// getAllByTypeSQL is the query text behind Querier.GetAllByType.
// $1 is the itemType to filter on; column order must match the Scan order
// in GetAllByType/GetAllByTypeScan.
const getAllByTypeSQL = `SELECT
id,
itemType,
iconURL,
itemName,
minItemPower,
maxItemPower,
rarity,
origin,
tooltip,
isEventItem
FROM
powerItem
WHERE
itemType = $1;`
// GetAllByTypeRow is one result row of the GetAllByType query.
// Only Tooltip and Iseventitem are pointers (nullable per pggen's
// inference) — note this differs from GetAllItemsRow, where every column is
// a pointer; TODO confirm which matches the actual schema.
type GetAllByTypeRow struct {
	ID           pgtype.UUID `json:"id"`
	Itemtype     int32       `json:"itemtype"`
	Iconurl      string      `json:"iconurl"`
	Itemname     string      `json:"itemname"`
	Minitempower int32       `json:"minitempower"`
	Maxitempower int32       `json:"maxitempower"`
	Rarity       int32       `json:"rarity"`
	Origin       string      `json:"origin"`
	Tooltip      *string     `json:"tooltip"`
	Iseventitem  *bool       `json:"iseventitem"`
}
// GetAllByType implements Querier.GetAllByType.
// It returns an empty (non-nil) slice when no items match itemType.
func (q *DBQuerier) GetAllByType(ctx context.Context, itemType int32) ([]GetAllByTypeRow, error) {
	// NOTE(review): string context key — see GetAllItems; generated as-is.
	ctx = context.WithValue(ctx, "pggen_query_name", "GetAllByType")
	rows, err := q.conn.Query(ctx, getAllByTypeSQL, itemType)
	if err != nil {
		return nil, fmt.Errorf("query GetAllByType: %w", err)
	}
	defer rows.Close()
	items := []GetAllByTypeRow{}
	for rows.Next() {
		var item GetAllByTypeRow
		// Scan order must match the column order in getAllByTypeSQL.
		if err := rows.Scan(&item.ID, &item.Itemtype, &item.Iconurl, &item.Itemname, &item.Minitempower, &item.Maxitempower, &item.Rarity, &item.Origin, &item.Tooltip, &item.Iseventitem); err != nil {
			return nil, fmt.Errorf("scan GetAllByType row: %w", err)
		}
		items = append(items, item)
	}
	if err := rows.Err(); err != nil {
		return nil, fmt.Errorf("close GetAllByType rows: %w", err)
	}
	// err is necessarily nil here; the Query error was handled above.
	return items, err
}
// GetAllByTypeBatch implements Querier.GetAllByTypeBatch.
// It only enqueues the query; read the result back with GetAllByTypeScan.
func (q *DBQuerier) GetAllByTypeBatch(batch genericBatch, itemType int32) {
	batch.Queue(getAllByTypeSQL, itemType)
}
// GetAllByTypeScan implements Querier.GetAllByTypeScan.
// Call it once per enqueued GetAllByTypeBatch, in enqueue order.
func (q *DBQuerier) GetAllByTypeScan(results pgx.BatchResults) ([]GetAllByTypeRow, error) {
	rows, err := results.Query()
	if err != nil {
		return nil, fmt.Errorf("query GetAllByTypeBatch: %w", err)
	}
	defer rows.Close()
	items := []GetAllByTypeRow{}
	for rows.Next() {
		var item GetAllByTypeRow
		// Scan order must match the column order in getAllByTypeSQL.
		if err := rows.Scan(&item.ID, &item.Itemtype, &item.Iconurl, &item.Itemname, &item.Minitempower, &item.Maxitempower, &item.Rarity, &item.Origin, &item.Tooltip, &item.Iseventitem); err != nil {
			return nil, fmt.Errorf("scan GetAllByTypeBatch row: %w", err)
		}
		items = append(items, item)
	}
	if err := rows.Err(); err != nil {
		return nil, fmt.Errorf("close GetAllByTypeBatch rows: %w", err)
	}
	// err is necessarily nil here; the Query error was handled above.
	return items, err
}
// findByIDSQL is the query text behind Querier.FindByID. $1 is the item ID;
// column order must match the Scan order in FindByID/FindByIDScan.
const findByIDSQL = `SELECT
id,
itemType,
iconURL,
itemName,
minItemPower,
maxItemPower,
rarity,
origin,
tooltip,
isEventItem
FROM
powerItem
WHERE
id = $1;`
// FindByIDRow is the single result row of the FindByID query.
// Tooltip and Iseventitem are pointers (nullable per pggen's inference) —
// TODO confirm against the powerItem table schema.
type FindByIDRow struct {
	ID           pgtype.UUID `json:"id"`
	Itemtype     int32       `json:"itemtype"`
	Iconurl      string      `json:"iconurl"`
	Itemname     string      `json:"itemname"`
	Minitempower int32       `json:"minitempower"`
	Maxitempower int32       `json:"maxitempower"`
	Rarity       int32       `json:"rarity"`
	Origin       string      `json:"origin"`
	Tooltip      *string     `json:"tooltip"`
	Iseventitem  *bool       `json:"iseventitem"`
}
// FindByID implements Querier.FindByID.
// When no item matches, Scan surfaces pgx.ErrNoRows (wrapped) per the
// QueryRow contract documented on genericConn.
func (q *DBQuerier) FindByID(ctx context.Context, id pgtype.UUID) (FindByIDRow, error) {
	// NOTE(review): string context key — see GetAllItems; generated as-is.
	ctx = context.WithValue(ctx, "pggen_query_name", "FindByID")
	row := q.conn.QueryRow(ctx, findByIDSQL, id)
	var item FindByIDRow
	// Scan order must match the column order in findByIDSQL.
	if err := row.Scan(&item.ID, &item.Itemtype, &item.Iconurl, &item.Itemname, &item.Minitempower, &item.Maxitempower, &item.Rarity, &item.Origin, &item.Tooltip, &item.Iseventitem); err != nil {
		return item, fmt.Errorf("query FindByID: %w", err)
	}
	return item, nil
}
// FindByIDBatch implements Querier.FindByIDBatch.
// It only enqueues the query; read the result back with FindByIDScan.
func (q *DBQuerier) FindByIDBatch(batch genericBatch, id pgtype.UUID) {
	batch.Queue(findByIDSQL, id)
}
// FindByIDScan implements Querier.FindByIDScan.
// Call it once per enqueued FindByIDBatch, in enqueue order.
func (q *DBQuerier) FindByIDScan(results pgx.BatchResults) (FindByIDRow, error) {
	row := results.QueryRow()
	var item FindByIDRow
	// Scan order must match the column order in findByIDSQL.
	if err := row.Scan(&item.ID, &item.Itemtype, &item.Iconurl, &item.Itemname, &item.Minitempower, &item.Maxitempower, &item.Rarity, &item.Origin, &item.Tooltip, &item.Iseventitem); err != nil {
		return item, fmt.Errorf("scan FindByIDBatch row: %w", err)
	}
	return item, nil
}
// addNewItemSQL is the insert behind Querier.AddNewItem. The id column is
// omitted, so it is filled by the database; RETURNING * echoes the full
// inserted row back, which AddNewItem scans in table-column order.
const addNewItemSQL = `INSERT INTO
powerItem (
itemType,
iconURL,
itemName,
minItemPower,
maxItemPower,
rarity,
origin,
tooltip,
isEventItem
)
VALUES
(
$1,
$2,
$3,
$4,
$5,
$6,
$7,
$8,
$9
) RETURNING *;`
// AddNewItemParams carries the positional arguments ($1..$9) for the
// AddNewItem insert, in declaration order.
type AddNewItemParams struct {
	ItemType     int32  `json:"itemType"`
	IconUrl      string `json:"iconUrl"`
	ItemName     string `json:"itemName"`
	MinItemPower int32  `json:"minItemPower"`
	MaxItemPower int32  `json:"maxItemPower"`
	Rarity       int32  `json:"rarity"`
	Origin       string `json:"origin"`
	Tooltip      string `json:"tooltip"`
	IsEventItem  bool   `json:"isEventItem"`
}
// AddNewItemRow is the row echoed back by AddNewItem's RETURNING * clause,
// including the database-assigned ID.
type AddNewItemRow struct {
	ID           pgtype.UUID `json:"id"`
	Itemtype     int32       `json:"itemtype"`
	Iconurl      string      `json:"iconurl"`
	Itemname     string      `json:"itemname"`
	Minitempower int32       `json:"minitempower"`
	Maxitempower int32       `json:"maxitempower"`
	Rarity       int32       `json:"rarity"`
	Origin       string      `json:"origin"`
	Tooltip      *string     `json:"tooltip"`
	Iseventitem  *bool       `json:"iseventitem"`
}
// AddNewItem implements Querier.AddNewItem.
// It inserts one row and returns the inserted row (including the
// database-assigned ID) via RETURNING *.
func (q *DBQuerier) AddNewItem(ctx context.Context, params AddNewItemParams) (AddNewItemRow, error) {
	// NOTE(review): string context key — see GetAllItems; generated as-is.
	ctx = context.WithValue(ctx, "pggen_query_name", "AddNewItem")
	row := q.conn.QueryRow(ctx, addNewItemSQL, params.ItemType, params.IconUrl, params.ItemName, params.MinItemPower, params.MaxItemPower, params.Rarity, params.Origin, params.Tooltip, params.IsEventItem)
	var item AddNewItemRow
	// Scan order must match the powerItem table column order (RETURNING *).
	if err := row.Scan(&item.ID, &item.Itemtype, &item.Iconurl, &item.Itemname, &item.Minitempower, &item.Maxitempower, &item.Rarity, &item.Origin, &item.Tooltip, &item.Iseventitem); err != nil {
		return item, fmt.Errorf("query AddNewItem: %w", err)
	}
	return item, nil
}
// AddNewItemBatch implements Querier.AddNewItemBatch.
// It only enqueues the insert; read the result back with AddNewItemScan.
func (q *DBQuerier) AddNewItemBatch(batch genericBatch, params AddNewItemParams) {
	batch.Queue(addNewItemSQL, params.ItemType, params.IconUrl, params.ItemName, params.MinItemPower, params.MaxItemPower, params.Rarity, params.Origin, params.Tooltip, params.IsEventItem)
}
// AddNewItemScan implements Querier.AddNewItemScan.
// Call it once per enqueued AddNewItemBatch, in enqueue order.
func (q *DBQuerier) AddNewItemScan(results pgx.BatchResults) (AddNewItemRow, error) {
	row := results.QueryRow()
	var item AddNewItemRow
	// Scan order must match the powerItem table column order (RETURNING *).
	if err := row.Scan(&item.ID, &item.Itemtype, &item.Iconurl, &item.Itemname, &item.Minitempower, &item.Maxitempower, &item.Rarity, &item.Origin, &item.Tooltip, &item.Iseventitem); err != nil {
		return item, fmt.Errorf("scan AddNewItemBatch row: %w", err)
	}
	return item, nil
}
// addNewItemWithIDSQL is the insert behind Querier.AddNewItemWithID. Unlike
// addNewItemSQL, the caller supplies the id ($1); RETURNING * echoes the full
// inserted row back.
const addNewItemWithIDSQL = `INSERT INTO
powerItem (
id,
itemType,
iconURL,
itemName,
minItemPower,
maxItemPower,
rarity,
origin,
tooltip,
isEventItem
)
VALUES
(
$1,
$2,
$3,
$4,
$5,
$6,
$7,
$8,
$9,
$10
) RETURNING *;`
// AddNewItemWithIDParams carries the positional arguments ($1..$10) for the
// AddNewItemWithID insert, in declaration order; ID is caller-supplied.
type AddNewItemWithIDParams struct {
	ID           pgtype.UUID `json:"id"`
	ItemType     int32       `json:"itemType"`
	IconUrl      string      `json:"iconUrl"`
	ItemName     string      `json:"itemName"`
	MinItemPower int32       `json:"minItemPower"`
	MaxItemPower int32       `json:"maxItemPower"`
	Rarity       int32       `json:"rarity"`
	Origin       string      `json:"origin"`
	Tooltip      string      `json:"tooltip"`
	IsEventItem  bool        `json:"isEventItem"`
}
// AddNewItemWithIDRow is the row echoed back by AddNewItemWithID's
// RETURNING * clause.
type AddNewItemWithIDRow struct {
	ID           pgtype.UUID `json:"id"`
	Itemtype     int32       `json:"itemtype"`
	Iconurl      string      `json:"iconurl"`
	Itemname     string      `json:"itemname"`
	Minitempower int32       `json:"minitempower"`
	Maxitempower int32       `json:"maxitempower"`
	Rarity       int32       `json:"rarity"`
	Origin       string      `json:"origin"`
	Tooltip      *string     `json:"tooltip"`
	Iseventitem  *bool       `json:"iseventitem"`
}
// AddNewItemWithID implements Querier.AddNewItemWithID.
// It inserts one row with a caller-supplied ID and returns the inserted row
// via RETURNING *.
func (q *DBQuerier) AddNewItemWithID(ctx context.Context, params AddNewItemWithIDParams) (AddNewItemWithIDRow, error) {
	// NOTE(review): string context key — see GetAllItems; generated as-is.
	ctx = context.WithValue(ctx, "pggen_query_name", "AddNewItemWithID")
	row := q.conn.QueryRow(ctx, addNewItemWithIDSQL, params.ID, params.ItemType, params.IconUrl, params.ItemName, params.MinItemPower, params.MaxItemPower, params.Rarity, params.Origin, params.Tooltip, params.IsEventItem)
	var item AddNewItemWithIDRow
	// Scan order must match the powerItem table column order (RETURNING *).
	if err := row.Scan(&item.ID, &item.Itemtype, &item.Iconurl, &item.Itemname, &item.Minitempower, &item.Maxitempower, &item.Rarity, &item.Origin, &item.Tooltip, &item.Iseventitem); err != nil {
		return item, fmt.Errorf("query AddNewItemWithID: %w", err)
	}
	return item, nil
}
// AddNewItemWithIDBatch implements Querier.AddNewItemWithIDBatch.
// It only enqueues the insert; read the result back with AddNewItemWithIDScan.
func (q *DBQuerier) AddNewItemWithIDBatch(batch genericBatch, params AddNewItemWithIDParams) {
	batch.Queue(addNewItemWithIDSQL, params.ID, params.ItemType, params.IconUrl, params.ItemName, params.MinItemPower, params.MaxItemPower, params.Rarity, params.Origin, params.Tooltip, params.IsEventItem)
}
// AddNewItemWithIDScan implements Querier.AddNewItemWithIDScan.
// Call it once per enqueued AddNewItemWithIDBatch, in enqueue order.
func (q *DBQuerier) AddNewItemWithIDScan(results pgx.BatchResults) (AddNewItemWithIDRow, error) {
	row := results.QueryRow()
	var item AddNewItemWithIDRow
	// Scan order must match the powerItem table column order (RETURNING *).
	if err := row.Scan(&item.ID, &item.Itemtype, &item.Iconurl, &item.Itemname, &item.Minitempower, &item.Maxitempower, &item.Rarity, &item.Origin, &item.Tooltip, &item.Iseventitem); err != nil {
		return item, fmt.Errorf("scan AddNewItemWithIDBatch row: %w", err)
	}
	return item, nil
}
// textPreferrer wraps a pgtype.ValueTranscoder and sets the preferred encoding
// format to text instead binary (the default). pggen uses the text format
// when the OID is unknownOID because the binary format requires the OID.
// Typically occurs if the results from QueryAllDataTypes aren't passed to
// NewQuerierConfig.
type textPreferrer struct {
	pgtype.ValueTranscoder
	typeName string // Postgres type name, exposed via TypeName
}
// PreferredParamFormat implements pgtype.ParamFormatPreferrer.
// It always reports the text wire format, since binary requires a known OID.
func (t textPreferrer) PreferredParamFormat() int16 { return pgtype.TextFormatCode }
// NewTypeValue returns a fresh textPreferrer wrapping a new instance of the
// underlying transcoder, keeping the same type name.
func (t textPreferrer) NewTypeValue() pgtype.Value {
	inner := pgtype.NewValue(t.ValueTranscoder)
	return textPreferrer{
		ValueTranscoder: inner.(pgtype.ValueTranscoder),
		typeName:        t.typeName,
	}
}
// TypeName returns the wrapped transcoder's Postgres type name.
func (t textPreferrer) TypeName() string {
	return t.typeName
}
// unknownOID means we don't know the OID for a type. This is okay for decoding
// because pgx call DecodeText or DecodeBinary without requiring the OID. For
// encoding parameters, pggen uses textPreferrer if the OID is unknown.
const unknownOID = 0