Mirror of https://github.com/sqlc-dev/sqlc.git (synced 2025-04-24 14:50:49 +03:00)
refactor(codegen): Remove golang and json settings from plugin proto (#2822)
* refactor(codegen): remove golang and json settings from plugin proto

  In order to simplify the codegen plugin proto, I've removed the Go and JSON plugin settings and instead pass those settings encoded as JSON to their respective plugins.

* clean up proto
* update sqlc-gen-json tests
* put json plugin settings in `PluginOptions`
parent 1b242a2bb8
commit 5ef20c6749
18 changed files with 412 additions and 3103 deletions
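The shape of the change, sketched below in minimal form: the host marshals the per-language settings to JSON and attaches the bytes to the code generation request as PluginOptions, and each plugin decodes that blob into its own options struct. The types in this sketch (jsonSettings, codeGenRequest) are illustrative stand-ins rather than the real sqlc types; only the option-passing pattern is shown.

package main

import (
    "bytes"
    "encoding/json"
    "fmt"
)

// Illustrative stand-ins for the real config and plugin request types.
type jsonSettings struct {
    Out      string `json:"out"`
    Indent   string `json:"indent,omitempty"`
    Filename string `json:"filename,omitempty"`
}

type codeGenRequest struct {
    PluginOptions []byte
}

func main() {
    // Host side: encode the per-language settings as JSON and attach them
    // to the request instead of dedicated proto fields.
    blob, err := json.Marshal(jsonSettings{Out: "gen", Indent: "  ", Filename: "codegen.json"})
    if err != nil {
        panic(err)
    }
    req := codeGenRequest{PluginOptions: blob}

    // Plugin side: decode the blob back into the plugin's own options
    // struct, rejecting unknown keys.
    var options jsonSettings
    dec := json.NewDecoder(bytes.NewReader(req.PluginOptions))
    dec.DisallowUnknownFields()
    if err := dec.Decode(&options); err != nil {
        panic(err)
    }
    fmt.Printf("%+v\n", options)
}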
@@ -9,6 +9,7 @@
pkgs.git-cliff
pkgs.govulncheck
pkgs.gopls
pkgs.golint
pkgs.mysql-shell
pkgs.postgresql_15
pkgs.python311
@@ -3,6 +3,7 @@ package cmd
import (
"bytes"
"context"
"encoding/json"
"errors"
"fmt"
"io"

@@ -16,7 +17,7 @@ import (
"google.golang.org/grpc/status"

"github.com/sqlc-dev/sqlc/internal/codegen/golang"
"github.com/sqlc-dev/sqlc/internal/codegen/json"
genjson "github.com/sqlc-dev/sqlc/internal/codegen/json"
"github.com/sqlc-dev/sqlc/internal/compiler"
"github.com/sqlc-dev/sqlc/internal/config"
"github.com/sqlc-dev/sqlc/internal/config/convert"

@@ -166,7 +167,7 @@ func Generate(ctx context.Context, e Env, dir, filename string, stderr io.Writer
Gen: config.SQLGen{JSON: sql.Gen.JSON},
})
}
for i, _ := range sql.Codegen {
for i := range sql.Codegen {
pairs = append(pairs, outPair{
SQL: sql,
Plugin: &sql.Codegen[i],

@@ -399,10 +400,20 @@ func codegen(ctx context.Context, combo config.CombinedSettings, sql outPair, re
case sql.Gen.Go != nil:
out = combo.Go.Out
handler = ext.HandleFunc(golang.Generate)
opts, err := json.Marshal(sql.Gen.Go)
if err != nil {
return "", nil, fmt.Errorf("opts marshal failed: %w", err)
}
req.PluginOptions = opts

case sql.Gen.JSON != nil:
out = combo.JSON.Out
handler = ext.HandleFunc(json.Generate)
handler = ext.HandleFunc(genjson.Generate)
opts, err := json.Marshal(sql.Gen.JSON)
if err != nil {
return "", nil, fmt.Errorf("opts marshal failed: %w", err)
}
req.PluginOptions = opts

default:
return "", nil, fmt.Errorf("missing language backend")
@@ -58,8 +58,6 @@ func pluginSettings(r *compiler.Result, cs config.CombinedSettings) *plugin.Sett
Overrides: over,
Rename: cs.Rename,
Codegen: pluginCodegen(cs.Codegen),
Go: pluginGoCode(cs.Go),
Json: pluginJSONCode(cs.JSON),
}
}

@@ -75,45 +73,6 @@ func pluginCodegen(s config.Codegen) *plugin.Codegen {
}
}

func pluginGoCode(s config.SQLGo) *plugin.GoCode {
if s.QueryParameterLimit == nil {
s.QueryParameterLimit = new(int32)
*s.QueryParameterLimit = 1
}

return &plugin.GoCode{
EmitInterface: s.EmitInterface,
EmitJsonTags: s.EmitJSONTags,
JsonTagsIdUppercase: s.JsonTagsIDUppercase,
EmitDbTags: s.EmitDBTags,
EmitPreparedQueries: s.EmitPreparedQueries,
EmitExactTableNames: s.EmitExactTableNames,
EmitEmptySlices: s.EmitEmptySlices,
EmitExportedQueries: s.EmitExportedQueries,
EmitResultStructPointers: s.EmitResultStructPointers,
EmitParamsStructPointers: s.EmitParamsStructPointers,
EmitMethodsWithDbArgument: s.EmitMethodsWithDBArgument,
EmitPointersForNullTypes: s.EmitPointersForNullTypes,
EmitEnumValidMethod: s.EmitEnumValidMethod,
EmitAllEnumValues: s.EmitAllEnumValues,
JsonTagsCaseStyle: s.JSONTagsCaseStyle,
Package: s.Package,
Out: s.Out,
SqlPackage: s.SQLPackage,
SqlDriver: s.SQLDriver,
OutputDbFileName: s.OutputDBFileName,
OutputBatchFileName: s.OutputBatchFileName,
OutputModelsFileName: s.OutputModelsFileName,
OutputQuerierFileName: s.OutputQuerierFileName,
OutputCopyfromFileName: s.OutputCopyFromFileName,
OutputFilesSuffix: s.OutputFilesSuffix,
InflectionExcludeTableNames: s.InflectionExcludeTableNames,
QueryParameterLimit: s.QueryParameterLimit,
OmitUnusedStructs: s.OmitUnusedStructs,
BuildTags: s.BuildTags,
}
}

func pluginGoType(o config.Override) *plugin.ParsedGoType {
// Note that there is a slight mismatch between this and the
// proto api. The GoType on the override is the unparsed type,

@@ -128,14 +87,6 @@ func pluginGoType(o config.Override) *plugin.ParsedGoType {
}
}

func pluginJSONCode(s config.SQLJSON) *plugin.JSONCode {
return &plugin.JSONCode{
Out: s.Out,
Indent: s.Indent,
Filename: s.Filename,
}
}

func pluginCatalog(c *catalog.Catalog) *plugin.Catalog {
var schemas []*plugin.Schema
for _, s := range c.Schemas {
@@ -40,9 +40,9 @@ func TagsToString(tags map[string]string) string {
return strings.Join(tagParts, " ")
}

func JSONTagName(name string, settings *plugin.Settings) string {
style := settings.Go.JsonTagsCaseStyle
idUppercase := settings.Go.JsonTagsIdUppercase
func JSONTagName(name string, options *opts) string {
style := options.JsonTagsCaseStyle
idUppercase := options.JsonTagsIdUppercase
if style == "" || style == "none" {
return name
} else {
@@ -103,55 +103,60 @@ func (t *tmplCtx) codegenQueryRetval(q Query) (string, error) {
}

func Generate(ctx context.Context, req *plugin.CodeGenRequest) (*plugin.CodeGenResponse, error) {
enums := buildEnums(req)
structs := buildStructs(req)
queries, err := buildQueries(req, structs)
options, err := parseOpts(req)
if err != nil {
return nil, err
}

if req.Settings.Go.OmitUnusedStructs {
enums := buildEnums(req, options)
structs := buildStructs(req, options)
queries, err := buildQueries(req, options, structs)
if err != nil {
return nil, err
}

if options.OmitUnusedStructs {
enums, structs = filterUnusedStructs(enums, structs, queries)
}

return generate(req, enums, structs, queries)
return generate(req, options, enums, structs, queries)
}

func generate(req *plugin.CodeGenRequest, enums []Enum, structs []Struct, queries []Query) (*plugin.CodeGenResponse, error) {
func generate(req *plugin.CodeGenRequest, options *opts, enums []Enum, structs []Struct, queries []Query) (*plugin.CodeGenResponse, error) {
i := &importer{
Settings: req.Settings,
Options: options,
Queries: queries,
Enums: enums,
Structs: structs,
}

golang := req.Settings.Go
tctx := tmplCtx{
EmitInterface: golang.EmitInterface,
EmitJSONTags: golang.EmitJsonTags,
JsonTagsIDUppercase: golang.JsonTagsIdUppercase,
EmitDBTags: golang.EmitDbTags,
EmitPreparedQueries: golang.EmitPreparedQueries,
EmitEmptySlices: golang.EmitEmptySlices,
EmitMethodsWithDBArgument: golang.EmitMethodsWithDbArgument,
EmitEnumValidMethod: golang.EmitEnumValidMethod,
EmitAllEnumValues: golang.EmitAllEnumValues,
EmitInterface: options.EmitInterface,
EmitJSONTags: options.EmitJsonTags,
JsonTagsIDUppercase: options.JsonTagsIdUppercase,
EmitDBTags: options.EmitDbTags,
EmitPreparedQueries: options.EmitPreparedQueries,
EmitEmptySlices: options.EmitEmptySlices,
EmitMethodsWithDBArgument: options.EmitMethodsWithDbArgument,
EmitEnumValidMethod: options.EmitEnumValidMethod,
EmitAllEnumValues: options.EmitAllEnumValues,
UsesCopyFrom: usesCopyFrom(queries),
UsesBatch: usesBatch(queries),
SQLDriver: parseDriver(golang.SqlPackage),
SQLDriver: parseDriver(options.SqlPackage),
Q: "`",
Package: golang.Package,
Package: options.Package,
Enums: enums,
Structs: structs,
SqlcVersion: req.SqlcVersion,
BuildTags: golang.BuildTags,
BuildTags: options.BuildTags,
}

if tctx.UsesCopyFrom && !tctx.SQLDriver.IsPGX() && golang.SqlDriver != SQLDriverGoSQLDriverMySQL {
if tctx.UsesCopyFrom && !tctx.SQLDriver.IsPGX() && options.SqlDriver != SQLDriverGoSQLDriverMySQL {
return nil, errors.New(":copyfrom is only supported by pgx and github.com/go-sql-driver/mysql")
}

if tctx.UsesCopyFrom && golang.SqlDriver == SQLDriverGoSQLDriverMySQL {
if tctx.UsesCopyFrom && options.SqlDriver == SQLDriverGoSQLDriverMySQL {
if err := checkNoTimesForMySQLCopyFrom(queries); err != nil {
return nil, err
}

@@ -208,8 +213,8 @@ func generate(req *plugin.CodeGenRequest, enums []Enum, structs []Struct, querie
return fmt.Errorf("source error: %w", err)
}

if templateName == "queryFile" && golang.OutputFilesSuffix != "" {
name += golang.OutputFilesSuffix
if templateName == "queryFile" && options.OutputFilesSuffix != "" {
name += options.OutputFilesSuffix
}

if !strings.HasSuffix(name, ".go") {

@@ -220,25 +225,25 @@ func generate(req *plugin.CodeGenRequest, enums []Enum, structs []Struct, querie
}

dbFileName := "db.go"
if golang.OutputDbFileName != "" {
dbFileName = golang.OutputDbFileName
if options.OutputDbFileName != "" {
dbFileName = options.OutputDbFileName
}
modelsFileName := "models.go"
if golang.OutputModelsFileName != "" {
modelsFileName = golang.OutputModelsFileName
if options.OutputModelsFileName != "" {
modelsFileName = options.OutputModelsFileName
}
querierFileName := "querier.go"
if golang.OutputQuerierFileName != "" {
querierFileName = golang.OutputQuerierFileName
if options.OutputQuerierFileName != "" {
querierFileName = options.OutputQuerierFileName
}
copyfromFileName := "copyfrom.go"
if golang.OutputCopyfromFileName != "" {
copyfromFileName = golang.OutputCopyfromFileName
if options.OutputCopyfromFileName != "" {
copyfromFileName = options.OutputCopyfromFileName
}

batchFileName := "batch.go"
if golang.OutputBatchFileName != "" {
batchFileName = golang.OutputBatchFileName
if options.OutputBatchFileName != "" {
batchFileName = options.OutputBatchFileName
}

if err := execute(dbFileName, "dbFile"); err != nil {

@@ -247,7 +252,7 @@ func generate(req *plugin.CodeGenRequest, enums []Enum, structs []Struct, querie
if err := execute(modelsFileName, "modelsFile"); err != nil {
return nil, err
}
if golang.EmitInterface {
if options.EmitInterface {
if err := execute(querierFileName, "interfaceFile"); err != nil {
return nil, err
}
@@ -31,7 +31,7 @@ func addExtraGoStructTags(tags map[string]string, req *plugin.CodeGenRequest, co
}
}

func goType(req *plugin.CodeGenRequest, col *plugin.Column) string {
func goType(req *plugin.CodeGenRequest, options *opts, col *plugin.Column) string {
// Check if the column's type has been overridden
for _, oride := range req.Settings.Overrides {
if oride.GoType.TypeName == "" {

@@ -49,7 +49,7 @@ func goType(req *plugin.CodeGenRequest, col *plugin.Column) string {
return oride.GoType.TypeName
}
}
typ := goInnerType(req, col)
typ := goInnerType(req, options, col)
if col.IsSqlcSlice {
return "[]" + typ
}

@@ -59,7 +59,7 @@ func goType(req *plugin.CodeGenRequest, col *plugin.Column) string {
return typ
}

func goInnerType(req *plugin.CodeGenRequest, col *plugin.Column) string {
func goInnerType(req *plugin.CodeGenRequest, options *opts, col *plugin.Column) string {
columnType := sdk.DataType(col.Type)
notNull := col.NotNull || col.IsArray

@@ -78,7 +78,7 @@ func goInnerType(req *plugin.CodeGenRequest, col *plugin.Column) string {
case "mysql":
return mysqlType(req, col)
case "postgresql":
return postgresType(req, col)
return postgresType(req, options, col)
case "sqlite":
return sqliteType(req, col)
default:
@@ -59,6 +59,7 @@ func mergeImports(imps ...fileImports) [][]ImportSpec {

type importer struct {
Settings *plugin.Settings
Options *opts
Queries []Query
Enums []Enum
Structs []Struct

@@ -77,24 +78,24 @@ func (i *importer) usesType(typ string) bool {

func (i *importer) Imports(filename string) [][]ImportSpec {
dbFileName := "db.go"
if i.Settings.Go.OutputDbFileName != "" {
dbFileName = i.Settings.Go.OutputDbFileName
if i.Options.OutputDbFileName != "" {
dbFileName = i.Options.OutputDbFileName
}
modelsFileName := "models.go"
if i.Settings.Go.OutputModelsFileName != "" {
modelsFileName = i.Settings.Go.OutputModelsFileName
if i.Options.OutputModelsFileName != "" {
modelsFileName = i.Options.OutputModelsFileName
}
querierFileName := "querier.go"
if i.Settings.Go.OutputQuerierFileName != "" {
querierFileName = i.Settings.Go.OutputQuerierFileName
if i.Options.OutputQuerierFileName != "" {
querierFileName = i.Options.OutputQuerierFileName
}
copyfromFileName := "copyfrom.go"
if i.Settings.Go.OutputCopyfromFileName != "" {
copyfromFileName = i.Settings.Go.OutputCopyfromFileName
if i.Options.OutputCopyfromFileName != "" {
copyfromFileName = i.Options.OutputCopyfromFileName
}
batchFileName := "batch.go"
if i.Settings.Go.OutputBatchFileName != "" {
batchFileName = i.Settings.Go.OutputBatchFileName
if i.Options.OutputBatchFileName != "" {
batchFileName = i.Options.OutputBatchFileName
}

switch filename {

@@ -119,7 +120,7 @@ func (i *importer) dbImports() fileImports {
{Path: "context"},
}

sqlpkg := parseDriver(i.Settings.Go.SqlPackage)
sqlpkg := parseDriver(i.Options.SqlPackage)
switch sqlpkg {
case SQLDriverPGXV4:
pkg = append(pkg, ImportSpec{Path: "github.com/jackc/pgconn"})

@@ -129,7 +130,7 @@ func (i *importer) dbImports() fileImports {
pkg = append(pkg, ImportSpec{Path: "github.com/jackc/pgx/v5"})
default:
std = append(std, ImportSpec{Path: "database/sql"})
if i.Settings.Go.EmitPreparedQueries {
if i.Options.EmitPreparedQueries {
std = append(std, ImportSpec{Path: "fmt"})
}
}

@@ -155,7 +156,7 @@ var pqtypeTypes = map[string]struct{}{
"pqtype.NullRawMessage": {},
}

func buildImports(settings *plugin.Settings, queries []Query, uses func(string) bool) (map[string]struct{}, map[ImportSpec]struct{}) {
func buildImports(settings *plugin.Settings, options *opts, queries []Query, uses func(string) bool) (map[string]struct{}, map[ImportSpec]struct{}) {
pkg := make(map[ImportSpec]struct{})
std := make(map[string]struct{})

@@ -163,7 +164,7 @@ func buildImports(settings *plugin.Settings, queries []Query, uses func(string)
std["database/sql"] = struct{}{}
}

sqlpkg := parseDriver(settings.Go.SqlPackage)
sqlpkg := parseDriver(options.SqlPackage)
for _, q := range queries {
if q.Cmd == metadata.CmdExecResult {
switch sqlpkg {

@@ -235,7 +236,7 @@ func buildImports(settings *plugin.Settings, queries []Query, uses func(string)
}

func (i *importer) interfaceImports() fileImports {
std, pkg := buildImports(i.Settings, i.Queries, func(name string) bool {
std, pkg := buildImports(i.Settings, i.Options, i.Queries, func(name string) bool {
for _, q := range i.Queries {
if q.hasRetType() {
if usesBatch([]Query{q}) {

@@ -260,7 +261,7 @@ func (i *importer) interfaceImports() fileImports {
}

func (i *importer) modelImports() fileImports {
std, pkg := buildImports(i.Settings, nil, i.usesType)
std, pkg := buildImports(i.Settings, i.Options, nil, i.usesType)

if len(i.Enums) > 0 {
std["fmt"] = struct{}{}

@@ -299,7 +300,7 @@ func (i *importer) queryImports(filename string) fileImports {
}
}

std, pkg := buildImports(i.Settings, gq, func(name string) bool {
std, pkg := buildImports(i.Settings, i.Options, gq, func(name string) bool {
for _, q := range gq {
if q.hasRetType() {
if q.Ret.EmitStruct() {

@@ -382,7 +383,7 @@ func (i *importer) queryImports(filename string) fileImports {
std["context"] = struct{}{}
}

sqlpkg := parseDriver(i.Settings.Go.SqlPackage)
sqlpkg := parseDriver(i.Options.SqlPackage)
if sqlcSliceScan() {
std["strings"] = struct{}{}
}

@@ -400,7 +401,7 @@ func (i *importer) copyfromImports() fileImports {
copyFromQueries = append(copyFromQueries, q)
}
}
std, pkg := buildImports(i.Settings, copyFromQueries, func(name string) bool {
std, pkg := buildImports(i.Settings, i.Options, copyFromQueries, func(name string) bool {
for _, q := range copyFromQueries {
if q.hasRetType() {
if strings.HasPrefix(q.Ret.Type(), name) {

@@ -417,7 +418,7 @@ func (i *importer) copyfromImports() fileImports {
})

std["context"] = struct{}{}
if i.Settings.Go.SqlDriver == SQLDriverGoSQLDriverMySQL {
if i.Options.SqlDriver == SQLDriverGoSQLDriverMySQL {
std["io"] = struct{}{}
std["fmt"] = struct{}{}
std["sync/atomic"] = struct{}{}

@@ -435,7 +436,7 @@ func (i *importer) batchImports() fileImports {
batchQueries = append(batchQueries, q)
}
}
std, pkg := buildImports(i.Settings, batchQueries, func(name string) bool {
std, pkg := buildImports(i.Settings, i.Options, batchQueries, func(name string) bool {
for _, q := range batchQueries {
if q.hasRetType() {
if q.Ret.EmitStruct() {

@@ -467,7 +468,7 @@ func (i *importer) batchImports() fileImports {

std["context"] = struct{}{}
std["errors"] = struct{}{}
sqlpkg := parseDriver(i.Settings.Go.SqlPackage)
sqlpkg := parseDriver(i.Options.SqlPackage)
switch sqlpkg {
case SQLDriverPGXV4:
pkg[ImportSpec{Path: "github.com/jackc/pgx/v4"}] = struct{}{}
internal/codegen/golang/opts.go (new file, 61 lines)

@@ -0,0 +1,61 @@
package golang

import (
"bytes"
"encoding/json"
"fmt"

"github.com/sqlc-dev/sqlc/internal/plugin"
)

type opts struct {
EmitInterface bool `json:"emit_interface"`
EmitJsonTags bool `json:"emit_json_tags"`
JsonTagsIdUppercase bool `json:"json_tags_id_uppercase"`
EmitDbTags bool `json:"emit_db_tags"`
EmitPreparedQueries bool `json:"emit_prepared_queries"`
EmitExactTableNames bool `json:"emit_exact_table_names,omitempty"`
EmitEmptySlices bool `json:"emit_empty_slices,omitempty"`
EmitExportedQueries bool `json:"emit_exported_queries"`
EmitResultStructPointers bool `json:"emit_result_struct_pointers"`
EmitParamsStructPointers bool `json:"emit_params_struct_pointers"`
EmitMethodsWithDbArgument bool `json:"emit_methods_with_db_argument,omitempty"`
EmitPointersForNullTypes bool `json:"emit_pointers_for_null_types"`
EmitEnumValidMethod bool `json:"emit_enum_valid_method,omitempty"`
EmitAllEnumValues bool `json:"emit_all_enum_values,omitempty"`
JsonTagsCaseStyle string `json:"json_tags_case_style,omitempty"`
Package string `json:"package"`
Out string `json:"out"`
SqlPackage string `json:"sql_package"`
SqlDriver string `json:"sql_driver"`
OutputBatchFileName string `json:"output_batch_file_name,omitempty"`
OutputDbFileName string `json:"output_db_file_name,omitempty"`
OutputModelsFileName string `json:"output_models_file_name,omitempty"`
OutputQuerierFileName string `json:"output_querier_file_name,omitempty"`
OutputCopyfromFileName string `json:"output_copyfrom_file_name,omitempty"`
OutputFilesSuffix string `json:"output_files_suffix,omitempty"`
InflectionExcludeTableNames []string `json:"inflection_exclude_table_names,omitempty"`
QueryParameterLimit *int32 `json:"query_parameter_limit,omitempty"`
OmitUnusedStructs bool `json:"omit_unused_structs,omitempty"`
BuildTags string `json:"build_tags,omitempty"`

// Unused but left in for parsing convenience
Overrides json.RawMessage `json:"overrides,omitempty"`
Rename json.RawMessage `json:"rename,omitempty"`
}

func parseOpts(req *plugin.CodeGenRequest) (*opts, error) {
var options *opts
dec := json.NewDecoder(bytes.NewReader(req.PluginOptions))
dec.DisallowUnknownFields()
if err := dec.Decode(&options); err != nil {
return options, fmt.Errorf("unmarshalling options: %w", err)
}

if options.QueryParameterLimit == nil {
options.QueryParameterLimit = new(int32)
*options.QueryParameterLimit = 1
}

return options, nil
}
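One consequence of the strict decoder above, shown with a trimmed-down stand-in for the opts struct: because parseOpts calls DisallowUnknownFields, a misspelled option key is reported as an error instead of being silently ignored.

package main

import (
    "bytes"
    "encoding/json"
    "fmt"
)

// Trimmed-down stand-in for the opts struct; only two fields are shown.
type goOpts struct {
    EmitInterface     bool   `json:"emit_interface"`
    JsonTagsCaseStyle string `json:"json_tags_case_style,omitempty"`
}

func decodeOpts(raw []byte) (*goOpts, error) {
    var o *goOpts
    dec := json.NewDecoder(bytes.NewReader(raw))
    dec.DisallowUnknownFields()
    if err := dec.Decode(&o); err != nil {
        return nil, fmt.Errorf("unmarshalling options: %w", err)
    }
    return o, nil
}

func main() {
    // A misspelled key is rejected instead of being dropped.
    _, err := decodeOpts([]byte(`{"emit_interfaces": true}`))
    fmt.Println(err) // unmarshalling options: json: unknown field "emit_interfaces"
}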
@@ -33,11 +33,11 @@ func parseIdentifierString(name string) (*plugin.Identifier, error) {
}
}

func postgresType(req *plugin.CodeGenRequest, col *plugin.Column) string {
func postgresType(req *plugin.CodeGenRequest, options *opts, col *plugin.Column) string {
columnType := sdk.DataType(col.Type)
notNull := col.NotNull || col.IsArray
driver := parseDriver(req.Settings.Go.SqlPackage)
emitPointersForNull := driver.IsPGX() && req.Settings.Go.EmitPointersForNullTypes
driver := parseDriver(options.SqlPackage)
emitPointersForNull := driver.IsPGX() && options.EmitPointersForNullTypes

switch columnType {
case "serial", "serial4", "pg_catalog.serial4":
@@ -11,7 +11,7 @@ import (
"github.com/sqlc-dev/sqlc/internal/plugin"
)

func buildEnums(req *plugin.CodeGenRequest) []Enum {
func buildEnums(req *plugin.CodeGenRequest, options *opts) []Enum {
var enums []Enum
for _, schema := range req.Catalog.Schemas {
if schema.Name == "pg_catalog" || schema.Name == "information_schema" {

@@ -31,9 +31,9 @@ func buildEnums(req *plugin.CodeGenRequest) []Enum {
NameTags: map[string]string{},
ValidTags: map[string]string{},
}
if req.Settings.Go.EmitJsonTags {
e.NameTags["json"] = JSONTagName(enumName, req.Settings)
e.ValidTags["json"] = JSONTagName("valid", req.Settings)
if options.EmitJsonTags {
e.NameTags["json"] = JSONTagName(enumName, options)
e.ValidTags["json"] = JSONTagName("valid", options)
}

seen := make(map[string]struct{}, len(enum.Vals))

@@ -58,7 +58,7 @@ func buildEnums(req *plugin.CodeGenRequest) []Enum {
return enums
}

func buildStructs(req *plugin.CodeGenRequest) []Struct {
func buildStructs(req *plugin.CodeGenRequest, options *opts) []Struct {
var structs []Struct
for _, schema := range req.Catalog.Schemas {
if schema.Name == "pg_catalog" || schema.Name == "information_schema" {

@@ -72,10 +72,10 @@ func buildStructs(req *plugin.CodeGenRequest) []Struct {
tableName = schema.Name + "_" + table.Rel.Name
}
structName := tableName
if !req.Settings.Go.EmitExactTableNames {
if !options.EmitExactTableNames {
structName = inflection.Singular(inflection.SingularParams{
Name: structName,
Exclusions: req.Settings.Go.InflectionExcludeTableNames,
Exclusions: options.InflectionExcludeTableNames,
})
}
s := Struct{

@@ -85,16 +85,16 @@ func buildStructs(req *plugin.CodeGenRequest) []Struct {
}
for _, column := range table.Columns {
tags := map[string]string{}
if req.Settings.Go.EmitDbTags {
if options.EmitDbTags {
tags["db"] = column.Name
}
if req.Settings.Go.EmitJsonTags {
tags["json"] = JSONTagName(column.Name, req.Settings)
if options.EmitJsonTags {
tags["json"] = JSONTagName(column.Name, options)
}
addExtraGoStructTags(tags, req, column)
s.Fields = append(s.Fields, Field{
Name: StructName(column.Name, req.Settings),
Type: goType(req, column),
Type: goType(req, options, column),
Tags: tags,
Comment: column.Comment,
})

@@ -181,7 +181,7 @@ func argName(name string) string {
return out
}

func buildQueries(req *plugin.CodeGenRequest, structs []Struct) ([]Query, error) {
func buildQueries(req *plugin.CodeGenRequest, options *opts, structs []Struct) ([]Query, error) {
qs := make([]Query, 0, len(req.Queries))
for _, query := range req.Queries {
if query.Name == "" {

@@ -192,7 +192,7 @@ func buildQueries(req *plugin.CodeGenRequest, structs []Struct) ([]Query, error)
}

var constantName string
if req.Settings.Go.EmitExportedQueries {
if options.EmitExportedQueries {
constantName = sdk.Title(query.Name)
} else {
constantName = sdk.LowerTitle(query.Name)

@@ -208,16 +208,16 @@ func buildQueries(req *plugin.CodeGenRequest, structs []Struct) ([]Query, error)
Comments: query.Comments,
Table: query.InsertIntoTable,
}
sqlpkg := parseDriver(req.Settings.Go.SqlPackage)
sqlpkg := parseDriver(options.SqlPackage)

qpl := int(*req.Settings.Go.QueryParameterLimit)
qpl := int(*options.QueryParameterLimit)

if len(query.Params) == 1 && qpl != 0 {
p := query.Params[0]
gq.Arg = QueryValue{
Name: paramName(p),
DBName: p.Column.GetName(),
Typ: goType(req, p.Column),
Typ: goType(req, options, p.Column),
SQLDriver: sqlpkg,
Column: p.Column,
}

@@ -229,7 +229,7 @@ func buildQueries(req *plugin.CodeGenRequest, structs []Struct) ([]Query, error)
Column: p.Column,
})
}
s, err := columnsToStruct(req, gq.MethodName+"Params", cols, false)
s, err := columnsToStruct(req, options, gq.MethodName+"Params", cols, false)
if err != nil {
return nil, err
}

@@ -238,7 +238,7 @@ func buildQueries(req *plugin.CodeGenRequest, structs []Struct) ([]Query, error)
Name: "arg",
Struct: s,
SQLDriver: sqlpkg,
EmitPointer: req.Settings.Go.EmitParamsStructPointers,
EmitPointer: options.EmitParamsStructPointers,
}

if len(query.Params) <= qpl {

@@ -255,7 +255,7 @@ func buildQueries(req *plugin.CodeGenRequest, structs []Struct) ([]Query, error)
gq.Ret = QueryValue{
Name: name,
DBName: name,
Typ: goType(req, c),
Typ: goType(req, options, c),
SQLDriver: sqlpkg,
}
} else if putOutColumns(query) {

@@ -270,7 +270,7 @@ func buildQueries(req *plugin.CodeGenRequest, structs []Struct) ([]Query, error)
for i, f := range s.Fields {
c := query.Columns[i]
sameName := f.Name == StructName(columnName(c, i), req.Settings)
sameType := f.Type == goType(req, c)
sameType := f.Type == goType(req, options, c)
sameTable := sdk.SameTableName(c.Table, s.Table, req.Catalog.DefaultSchema)
if !sameName || !sameType || !sameTable {
same = false

@@ -292,7 +292,7 @@ func buildQueries(req *plugin.CodeGenRequest, structs []Struct) ([]Query, error)
})
}
var err error
gs, err = columnsToStruct(req, gq.MethodName+"Row", columns, true)
gs, err = columnsToStruct(req, options, gq.MethodName+"Row", columns, true)
if err != nil {
return nil, err
}

@@ -303,7 +303,7 @@ func buildQueries(req *plugin.CodeGenRequest, structs []Struct) ([]Query, error)
Name: "i",
Struct: gs,
SQLDriver: sqlpkg,
EmitPointer: req.Settings.Go.EmitResultStructPointers,
EmitPointer: options.EmitResultStructPointers,
}
}

@@ -333,7 +333,7 @@ func putOutColumns(query *plugin.Query) bool {
// JSON tags: count, count_2, count_2
//
// This is unlikely to happen, so don't fix it yet
func columnsToStruct(req *plugin.CodeGenRequest, name string, columns []goColumn, useID bool) (*Struct, error) {
func columnsToStruct(req *plugin.CodeGenRequest, options *opts, name string, columns []goColumn, useID bool) (*Struct, error) {
gs := Struct{
Name: name,
}

@@ -365,11 +365,11 @@ func columnsToStruct(req *plugin.CodeGenRequest, name string, columns []goColumn
fieldName = fmt.Sprintf("%s_%d", fieldName, suffix)
}
tags := map[string]string{}
if req.Settings.Go.EmitDbTags {
if options.EmitDbTags {
tags["db"] = tagName
}
if req.Settings.Go.EmitJsonTags {
tags["json"] = JSONTagName(tagName, req.Settings)
if options.EmitJsonTags {
tags["json"] = JSONTagName(tagName, options)
}
addExtraGoStructTags(tags, req, c.Column)
f := Field{

@@ -379,7 +379,7 @@ func columnsToStruct(req *plugin.CodeGenRequest, name string, columns []goColumn
Column: c.Column,
}
if c.embed == nil {
f.Type = goType(req, c.Column)
f.Type = goType(req, options, c.Column)
} else {
f.Type = c.embed.modelType
f.EmbedFields = c.embed.fields
@@ -11,25 +11,18 @@
"github.com/sqlc-dev/sqlc/internal/plugin"
)

func parseOptions(req *plugin.CodeGenRequest) (*plugin.JSONCode, error) {
if req.Settings == nil {
return new(plugin.JSONCode), nil
func parseOptions(req *plugin.CodeGenRequest) (*opts, error) {
if len(req.PluginOptions) == 0 {
return new(opts), nil
}
if req.Settings.Codegen != nil {
if len(req.Settings.Codegen.Options) != 0 {
var options *plugin.JSONCode
dec := ejson.NewDecoder(bytes.NewReader(req.Settings.Codegen.Options))
dec.DisallowUnknownFields()
if err := dec.Decode(&options); err != nil {
return options, fmt.Errorf("unmarshalling options: %s", err)
}
return options, nil
}

var options *opts
dec := ejson.NewDecoder(bytes.NewReader(req.PluginOptions))
dec.DisallowUnknownFields()
if err := dec.Decode(&options); err != nil {
return options, fmt.Errorf("unmarshalling options: %s", err)
}
if req.Settings.Json != nil {
return req.Settings.Json, nil
}
return new(plugin.JSONCode), nil
return options, nil
}

func Generate(ctx context.Context, req *plugin.CodeGenRequest) (*plugin.CodeGenResponse, error) {
internal/codegen/json/opts.go (new file, 7 lines)

@@ -0,0 +1,7 @@
package json

type opts struct {
Out string `json:"out"`
Indent string `json:"indent,omitempty"`
Filename string `json:"filename,omitempty"`
}
@@ -14,42 +14,6 @@
"out": "",
"plugin": "",
"options": ""
},
"go": {
"emit_interface": false,
"emit_json_tags": false,
"emit_db_tags": false,
"emit_prepared_queries": false,
"emit_exact_table_names": false,
"emit_empty_slices": false,
"emit_exported_queries": false,
"emit_result_struct_pointers": false,
"emit_params_struct_pointers": false,
"emit_methods_with_db_argument": false,
"json_tags_case_style": "",
"package": "",
"out": "",
"sql_package": "",
"sql_driver": "",
"output_db_file_name": "",
"output_models_file_name": "",
"output_querier_file_name": "",
"output_copyfrom_file_name": "",
"output_files_suffix": "",
"emit_enum_valid_method": false,
"emit_all_enum_values": false,
"inflection_exclude_table_names": [],
"emit_pointers_for_null_types": false,
"query_parameter_limit": 1,
"output_batch_file_name": "",
"json_tags_id_uppercase": false,
"omit_unused_structs": false,
"build_tags": ""
},
"json": {
"out": "gen",
"indent": "  ",
"filename": "codegen.json"
}
},
"catalog": {

@@ -65399,5 +65363,5 @@
}
],
"sqlc_version": "v1.22.0",
"plugin_options": ""
"plugin_options": "eyJvdXQiOiJnZW4iLCJpbmRlbnQiOiIgICIsImZpbGVuYW1lIjoiY29kZWdlbi5qc29uIn0="
}
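The plugin_options value added above is the base64 encoding of the JSON-serialized options; decoding it yields the same settings that previously lived in the fixture's json block. A quick check:

package main

import (
    "encoding/base64"
    "fmt"
)

func main() {
    // plugin_options value from the fixture above.
    const b64 = "eyJvdXQiOiJnZW4iLCJpbmRlbnQiOiIgICIsImZpbGVuYW1lIjoiY29kZWdlbi5qc29uIn0="
    raw, err := base64.StdEncoding.DecodeString(b64)
    if err != nil {
        panic(err)
    }
    fmt.Println(string(raw)) // {"out":"gen","indent":"  ","filename":"codegen.json"}
}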
@@ -14,42 +14,6 @@
"out": "gen",
"plugin": "jsonb",
"options": "eyJmaWxlbmFtZSI6ImNvZGVnZW4uanNvbiIsImluZGVudCI6IiAgIn0="
},
"go": {
"emit_interface": false,
"emit_json_tags": false,
"emit_db_tags": false,
"emit_prepared_queries": false,
"emit_exact_table_names": false,
"emit_empty_slices": false,
"emit_exported_queries": false,
"emit_result_struct_pointers": false,
"emit_params_struct_pointers": false,
"emit_methods_with_db_argument": false,
"json_tags_case_style": "",
"package": "",
"out": "",
"sql_package": "",
"sql_driver": "",
"output_db_file_name": "",
"output_models_file_name": "",
"output_querier_file_name": "",
"output_copyfrom_file_name": "",
"output_files_suffix": "",
"emit_enum_valid_method": false,
"emit_all_enum_values": false,
"inflection_exclude_table_names": [],
"emit_pointers_for_null_types": false,
"query_parameter_limit": 1,
"output_batch_file_name": "",
"json_tags_id_uppercase": false,
"omit_unused_structs": false,
"build_tags": ""
},
"json": {
"out": "",
"indent": "",
"filename": ""
}
},
"catalog": {
@@ -7,7 +7,6 @@ package wasm
import (
"context"
"crypto/sha256"
_ "embed"
"errors"
"fmt"
"io"
Two file diffs suppressed because they are too large.
@@ -46,7 +46,9 @@ message ParsedGoType {
message Settings {
// PythonCode message was field 8
// KotlinCode message was field 9
reserved 8, 9;
// GoCode message was field 10;
// JSONCode message was field 11;
reserved 8, 9, 10, 11;

string version = 1 [json_name = "version"];
string engine = 2 [json_name = "engine"];

@@ -55,10 +57,6 @@ message Settings {
map<string, string> rename = 5 [json_name = "rename"];
repeated Override overrides = 6 [json_name = "overrides"];
Codegen codegen = 12 [json_name = "codegen"];

// TODO: Refactor codegen settings
GoCode go = 10;
JSONCode json = 11;
}

message Codegen {

@@ -67,44 +65,6 @@ message Codegen {
bytes options = 3 [json_name = "options"];
}

message GoCode {
bool emit_interface = 1;
bool emit_json_tags = 2;
bool emit_db_tags = 3;
bool emit_prepared_queries = 4;
bool emit_exact_table_names = 5;
bool emit_empty_slices = 6;
bool emit_exported_queries = 7;
bool emit_result_struct_pointers = 8;
bool emit_params_struct_pointers = 9;
bool emit_methods_with_db_argument = 10;
string json_tags_case_style = 11;
string package = 12;
string out = 13;
string sql_package = 14;
string sql_driver = 25;
string output_db_file_name = 15;
string output_models_file_name = 16;
string output_querier_file_name = 17;
string output_copyfrom_file_name = 28;
string output_files_suffix = 18;
bool emit_enum_valid_method = 19;
bool emit_all_enum_values = 20;
repeated string inflection_exclude_table_names = 21;
bool emit_pointers_for_null_types = 22;
optional int32 query_parameter_limit = 23;
string output_batch_file_name = 24;
bool json_tags_id_uppercase = 26;
bool omit_unused_structs = 27;
string build_tags = 29;
}

message JSONCode {
string out = 1;
string indent = 2;
string filename = 3;
}

message Catalog {
string comment = 1;
string default_schema = 2;