diff --git a/internal/compiler/compile.go b/internal/compiler/compile.go
index 84fbb20a3c..36e8b9a652 100644
--- a/internal/compiler/compile.go
+++ b/internal/compiler/compile.go
@@ -1,6 +1,7 @@
 package compiler
 
 import (
+	"context"
 	"errors"
 	"fmt"
 	"io"
@@ -8,13 +9,16 @@ import (
 	"path/filepath"
 	"strings"
 
+	"github.com/sqlc-dev/sqlc/internal/metadata"
 	"github.com/sqlc-dev/sqlc/internal/migrations"
 	"github.com/sqlc-dev/sqlc/internal/multierr"
 	"github.com/sqlc-dev/sqlc/internal/opts"
 	"github.com/sqlc-dev/sqlc/internal/rpc"
 	"github.com/sqlc-dev/sqlc/internal/source"
 	"github.com/sqlc-dev/sqlc/internal/sql/ast"
+	"github.com/sqlc-dev/sqlc/internal/sql/named"
 	"github.com/sqlc-dev/sqlc/internal/sql/sqlerr"
+	"github.com/sqlc-dev/sqlc/internal/sql/sqlfile"
 	"github.com/sqlc-dev/sqlc/internal/sql/sqlpath"
 )
 
@@ -30,6 +34,24 @@ func (c *Compiler) parseCatalog(schemas []string) error {
 	if err != nil {
 		return err
 	}
+
+	// Check if we're in skip_parser mode
+	skipParser := c.conf.Analyzer.SkipParser != nil && *c.conf.Analyzer.SkipParser
+
+	// If skip_parser is enabled, just read schema files without parsing
+	if skipParser {
+		for _, filename := range files {
+			blob, err := os.ReadFile(filename)
+			if err != nil {
+				return fmt.Errorf("reading schema file %s: %w", filename, err)
+			}
+			contents := migrations.RemoveRollbackStatements(string(blob))
+			c.schema = append(c.schema, contents)
+		}
+		return nil
+	}
+
+	// Normal path: parse and update catalog
 	merr := multierr.New()
 	for _, filename := range files {
 		blob, err := os.ReadFile(filename)
@@ -118,3 +140,132 @@ func (c *Compiler) parseQueries(o opts.Parser) (*Result, error) {
 		Queries: q,
 	}, nil
 }
+
+// parseQueriesWithAnalyzer parses queries using only the database analyzer,
+// skipping the parser and catalog entirely. Uses sqlfile.Split to extract
+// individual queries from .sql files.
+func (c *Compiler) parseQueriesWithAnalyzer(o opts.Parser) (*Result, error) {
+	ctx := context.Background()
+	var q []*Query
+	merr := multierr.New()
+	set := map[string]struct{}{}
+	files, err := sqlpath.Glob(c.conf.Queries)
+	if err != nil {
+		return nil, err
+	}
+
+	if c.analyzer == nil {
+		return nil, fmt.Errorf("database analyzer is required when skip_parser is enabled")
+	}
+
+	for _, filename := range files {
+		blob, err := os.ReadFile(filename)
+		if err != nil {
+			merr.Add(filename, "", 0, err)
+			continue
+		}
+		src := string(blob)
+
+		// Use sqlfile.Split to extract individual queries
+		queries, err := sqlfile.Split(ctx, strings.NewReader(src))
+		if err != nil {
+			merr.Add(filename, src, 0, err)
+			continue
+		}
+
+		for _, queryText := range queries {
+			// Extract metadata from comments
+			name, cmd, err := metadata.ParseQueryNameAndType(queryText, metadata.CommentSyntax{Dash: true})
+			if err != nil {
+				merr.Add(filename, queryText, 0, err)
+				continue
+			}
+
+			// Skip queries without names (not marked with sqlc comments)
+			if name == "" {
+				continue
+			}
+
+			// Check for duplicate query names
+			if _, exists := set[name]; exists {
+				merr.Add(filename, queryText, 0, fmt.Errorf("duplicate query name: %s", name))
+				continue
+			}
+			set[name] = struct{}{}
+
+			// Extract additional metadata from comments
+			cleanedComments, err := source.CleanedComments(queryText, source.CommentSyntax{Dash: true})
+			if err != nil {
+				merr.Add(filename, queryText, 0, err)
+				continue
+			}
+
+			md := metadata.Metadata{
+				Name:     name,
+				Cmd:      cmd,
+				Filename: filepath.Base(filename),
+			}
+
+			md.Params, md.Flags, md.RuleSkiplist, err = metadata.ParseCommentFlags(cleanedComments)
+			if err != nil {
+				merr.Add(filename, queryText, 0, err)
+				continue
+			}
+
+			// Use the database analyzer to analyze the query
+			// We pass an empty AST node since we're not using the parser
+			result, err := c.analyzer.Analyze(ctx, nil, queryText, c.schema, &named.ParamSet{})
+			if err != nil {
+				merr.Add(filename, queryText, 0, err)
+				// If this rpc unauthenticated error bubbles up, then all future parsing/analysis will fail
+				if errors.Is(err, rpc.ErrUnauthenticated) {
+					return nil, merr
+				}
+				continue
+			}
+
+			// Convert analyzer results to Query format
+			var cols []*Column
+			for _, col := range result.Columns {
+				cols = append(cols, convertColumn(col))
+			}
+
+			var params []Parameter
+			for _, p := range result.Params {
+				params = append(params, Parameter{
+					Number: int(p.Number),
+					Column: convertColumn(p.Column),
+				})
+			}
+
+			// Strip comments from the final SQL
+			trimmed, comments, err := source.StripComments(queryText)
+			if err != nil {
+				merr.Add(filename, queryText, 0, err)
+				continue
+			}
+			md.Comments = comments
+
+			query := &Query{
+				SQL:      trimmed,
+				Metadata: md,
+				Columns:  cols,
+				Params:   params,
+			}
+
+			q = append(q, query)
+		}
+	}
+
+	if len(merr.Errs()) > 0 {
+		return nil, merr
+	}
+	if len(q) == 0 {
+		return nil, fmt.Errorf("no queries contained in paths %s", strings.Join(c.conf.Queries, ","))
+	}
+
+	return &Result{
+		Catalog: nil, // No catalog when skip_parser is enabled
+		Queries: q,
+	}, nil
+}
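The heart of the change is the split-then-annotate loop in `parseQueriesWithAnalyzer`: `sqlfile.Split` breaks each `.sql` file into individual statements, and `metadata.ParseQueryNameAndType` reads the `-- name: Foo :one` annotation on each one. The sketch below is a minimal illustration of that flow using only the call signatures visible in this diff; it assumes it is compiled inside the sqlc module, since the imported packages are `internal`.

```go
package example

import (
	"context"
	"fmt"
	"strings"

	"github.com/sqlc-dev/sqlc/internal/metadata"
	"github.com/sqlc-dev/sqlc/internal/sql/sqlfile"
)

// listNamedQueries splits a .sql blob into statements and prints the
// sqlc-annotated ones, mirroring the loop in parseQueriesWithAnalyzer.
func listNamedQueries(ctx context.Context, src string) error {
	queries, err := sqlfile.Split(ctx, strings.NewReader(src))
	if err != nil {
		return err
	}
	for _, queryText := range queries {
		name, cmd, err := metadata.ParseQueryNameAndType(queryText, metadata.CommentSyntax{Dash: true})
		if err != nil {
			return err
		}
		if name == "" {
			// Statement has no "-- name:" comment, so skip_parser mode ignores it.
			continue
		}
		fmt.Printf("%s (%s)\n", name, cmd)
	}
	return nil
}
```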
diff --git a/internal/compiler/engine.go b/internal/compiler/engine.go
index f742bfd999..5b0f5d12cd 100644
--- a/internal/compiler/engine.go
+++ b/internal/compiler/engine.go
@@ -36,6 +36,23 @@ func NewCompiler(conf config.SQL, combo config.CombinedSettings) (*Compiler, err
 		c.client = client
 	}
 
+	// Check if skip_parser is enabled
+	skipParser := conf.Analyzer.SkipParser != nil && *conf.Analyzer.SkipParser
+
+	// If skip_parser is enabled, we must have database analyzer enabled
+	if skipParser {
+		if conf.Database == nil {
+			return nil, fmt.Errorf("skip_parser requires database configuration")
+		}
+		if conf.Analyzer.Database != nil && !*conf.Analyzer.Database {
+			return nil, fmt.Errorf("skip_parser requires database analyzer to be enabled")
+		}
+		// Only PostgreSQL is supported for now
+		if conf.Engine != config.EnginePostgreSQL {
+			return nil, fmt.Errorf("skip_parser is only supported for PostgreSQL")
+		}
+	}
+
 	switch conf.Engine {
 	case config.EngineSQLite:
 		c.parser = sqlite.NewParser()
@@ -46,8 +63,11 @@ func NewCompiler(conf config.SQL, combo config.CombinedSettings) (*Compiler, err
 		c.catalog = dolphin.NewCatalog()
 		c.selector = newDefaultSelector()
 	case config.EnginePostgreSQL:
-		c.parser = postgresql.NewParser()
-		c.catalog = postgresql.NewCatalog()
+		// Skip parser and catalog if skip_parser is enabled
+		if !skipParser {
+			c.parser = postgresql.NewParser()
+			c.catalog = postgresql.NewCatalog()
+		}
 		c.selector = newDefaultSelector()
 		if conf.Database != nil {
 			if conf.Analyzer.Database == nil || *conf.Analyzer.Database {
@@ -73,7 +93,20 @@ func (c *Compiler) ParseCatalog(schema []string) error {
 }
 
 func (c *Compiler) ParseQueries(queries []string, o opts.Parser) error {
-	r, err := c.parseQueries(o)
+	// Check if skip_parser is enabled
+	skipParser := c.conf.Analyzer.SkipParser != nil && *c.conf.Analyzer.SkipParser
+
+	var r *Result
+	var err error
+
+	if skipParser {
+		// Use database analyzer only, skip parser and catalog
+		r, err = c.parseQueriesWithAnalyzer(o)
+	} else {
+		// Use traditional parser-based approach
+		r, err = c.parseQueries(o)
+	}
+
 	if err != nil {
 		return err
 	}
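The new checks in `NewCompiler` fail fast when `skip_parser` is set without a usable database analyzer. A hypothetical regression test for the first check is sketched below; it is not part of this change, and it assumes the zero value of `config.CombinedSettings` is acceptable for this code path.

```go
package compiler

import (
	"strings"
	"testing"

	"github.com/sqlc-dev/sqlc/internal/config"
)

// TestSkipParserRequiresDatabase exercises the first validation added to NewCompiler.
func TestSkipParserRequiresDatabase(t *testing.T) {
	skip := true
	conf := config.SQL{
		Engine:   config.EnginePostgreSQL,
		Analyzer: config.Analyzer{SkipParser: &skip},
		// Database is intentionally left nil to trigger the new check.
	}
	_, err := NewCompiler(conf, config.CombinedSettings{})
	if err == nil || !strings.Contains(err.Error(), "skip_parser requires database configuration") {
		t.Fatalf("expected skip_parser validation error, got %v", err)
	}
}
```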
diff --git a/internal/config/config.go b/internal/config/config.go
index 0ff805fccd..56b6535f93 100644
--- a/internal/config/config.go
+++ b/internal/config/config.go
@@ -123,7 +123,8 @@ type SQL struct {
 }
 
 type Analyzer struct {
-	Database *bool `json:"database" yaml:"database"`
+	Database   *bool `json:"database" yaml:"database"`
+	SkipParser *bool `json:"skip_parser" yaml:"skip_parser"`
 }
 
 // TODO: Figure out a better name for this
diff --git a/internal/endtoend/testdata/skip_parser/postgresql/pgx/v5/exec.json b/internal/endtoend/testdata/skip_parser/postgresql/pgx/v5/exec.json
new file mode 100644
index 0000000000..ee1b7ecd9e
--- /dev/null
+++ b/internal/endtoend/testdata/skip_parser/postgresql/pgx/v5/exec.json
@@ -0,0 +1,3 @@
+{
+  "contexts": ["managed-db"]
+}
diff --git a/internal/endtoend/testdata/skip_parser/postgresql/pgx/v5/go/db.go b/internal/endtoend/testdata/skip_parser/postgresql/pgx/v5/go/db.go
new file mode 100644
index 0000000000..1e00549714
--- /dev/null
+++ b/internal/endtoend/testdata/skip_parser/postgresql/pgx/v5/go/db.go
@@ -0,0 +1,32 @@
+// Code generated by sqlc. DO NOT EDIT.
+// versions:
+//   sqlc v1.30.0
+
+package querytest
+
+import (
+	"context"
+
+	"github.com/jackc/pgx/v5"
+	"github.com/jackc/pgx/v5/pgconn"
+)
+
+type DBTX interface {
+	Exec(context.Context, string, ...interface{}) (pgconn.CommandTag, error)
+	Query(context.Context, string, ...interface{}) (pgx.Rows, error)
+	QueryRow(context.Context, string, ...interface{}) pgx.Row
+}
+
+func New(db DBTX) *Queries {
+	return &Queries{db: db}
+}
+
+type Queries struct {
+	db DBTX
+}
+
+func (q *Queries) WithTx(tx pgx.Tx) *Queries {
+	return &Queries{
+		db: tx,
+	}
+}
diff --git a/internal/endtoend/testdata/skip_parser/postgresql/pgx/v5/go/models.go b/internal/endtoend/testdata/skip_parser/postgresql/pgx/v5/go/models.go
new file mode 100644
index 0000000000..f64b1f4161
--- /dev/null
+++ b/internal/endtoend/testdata/skip_parser/postgresql/pgx/v5/go/models.go
@@ -0,0 +1,16 @@
+// Code generated by sqlc. DO NOT EDIT.
+// versions:
+//   sqlc v1.30.0
+
+package querytest
+
+import (
+	"github.com/jackc/pgx/v5/pgtype"
+)
+
+type Product struct {
+	ID    int64
+	Name  string
+	Price pgtype.Numeric
+	Tags  []string
+}
diff --git a/internal/endtoend/testdata/skip_parser/postgresql/pgx/v5/go/query.sql.go b/internal/endtoend/testdata/skip_parser/postgresql/pgx/v5/go/query.sql.go
new file mode 100644
index 0000000000..e91e1de4cd
--- /dev/null
+++ b/internal/endtoend/testdata/skip_parser/postgresql/pgx/v5/go/query.sql.go
@@ -0,0 +1,85 @@
+// Code generated by sqlc. DO NOT EDIT.
+// versions:
+//   sqlc v1.30.0
+// source: query.sql
+
+package querytest
+
+import (
+	"context"
+
+	"github.com/jackc/pgx/v5/pgtype"
+)
+
+const createProduct = `-- name: CreateProduct :one
+INSERT INTO products (name, price, tags)
+VALUES ($1, $2, $3)
+RETURNING id, name, price, tags
+`
+
+type CreateProductParams struct {
+	Name  string
+	Price pgtype.Numeric
+	Tags  []string
+}
+
+func (q *Queries) CreateProduct(ctx context.Context, arg CreateProductParams) (Product, error) {
+	row := q.db.QueryRow(ctx, createProduct, arg.Name, arg.Price, arg.Tags)
+	var i Product
+	err := row.Scan(
+		&i.ID,
+		&i.Name,
+		&i.Price,
+		&i.Tags,
+	)
+	return i, err
+}
+
+const getProduct = `-- name: GetProduct :one
+SELECT id, name, price, tags
+FROM products
+WHERE id = $1
+`
+
+func (q *Queries) GetProduct(ctx context.Context, id int64) (Product, error) {
+	row := q.db.QueryRow(ctx, getProduct, id)
+	var i Product
+	err := row.Scan(
+		&i.ID,
+		&i.Name,
+		&i.Price,
+		&i.Tags,
+	)
+	return i, err
+}
+
+const listProducts = `-- name: ListProducts :many
+SELECT id, name, price, tags
+FROM products
+ORDER BY id
+`
+
+func (q *Queries) ListProducts(ctx context.Context) ([]Product, error) {
+	rows, err := q.db.Query(ctx, listProducts)
+	if err != nil {
+		return nil, err
+	}
+	defer rows.Close()
+	var items []Product
+	for rows.Next() {
+		var i Product
+		if err := rows.Scan(
+			&i.ID,
+			&i.Name,
+			&i.Price,
+			&i.Tags,
+		); err != nil {
+			return nil, err
+		}
+		items = append(items, i)
+	}
+	if err := rows.Err(); err != nil {
+		return nil, err
+	}
+	return items, nil
+}
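The generated package has the same shape the parser-based pipeline produces, so downstream callers are unaffected by `skip_parser`. A small usage sketch follows; the import path and connection string are placeholders, and `*pgx.Conn` satisfies the generated `DBTX` interface.

```go
package main

import (
	"context"
	"log"

	"github.com/jackc/pgx/v5"

	"example.com/app/querytest" // hypothetical import path for the generated "go" directory
)

func main() {
	ctx := context.Background()
	conn, err := pgx.Connect(ctx, "postgres://localhost:5432/app") // placeholder DSN
	if err != nil {
		log.Fatal(err)
	}
	defer conn.Close(ctx)

	// *pgx.Conn implements the generated DBTX interface.
	q := querytest.New(conn)

	products, err := q.ListProducts(ctx)
	if err != nil {
		log.Fatal(err)
	}
	for _, p := range products {
		log.Printf("%d %s tags=%v", p.ID, p.Name, p.Tags)
	}

	first, err := q.GetProduct(ctx, 1)
	if err != nil {
		log.Fatal(err)
	}
	log.Println(first.Name)
}
```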
diff --git a/internal/endtoend/testdata/skip_parser/postgresql/pgx/v5/query.sql b/internal/endtoend/testdata/skip_parser/postgresql/pgx/v5/query.sql
new file mode 100644
index 0000000000..24da729235
--- /dev/null
+++ b/internal/endtoend/testdata/skip_parser/postgresql/pgx/v5/query.sql
@@ -0,0 +1,14 @@
+-- name: GetProduct :one
+SELECT id, name, price, tags
+FROM products
+WHERE id = $1;
+
+-- name: ListProducts :many
+SELECT id, name, price, tags
+FROM products
+ORDER BY id;
+
+-- name: CreateProduct :one
+INSERT INTO products (name, price, tags)
+VALUES ($1, $2, $3)
+RETURNING id, name, price, tags;
diff --git a/internal/endtoend/testdata/skip_parser/postgresql/pgx/v5/schema.sql b/internal/endtoend/testdata/skip_parser/postgresql/pgx/v5/schema.sql
new file mode 100644
index 0000000000..2b9fc7e5c1
--- /dev/null
+++ b/internal/endtoend/testdata/skip_parser/postgresql/pgx/v5/schema.sql
@@ -0,0 +1,6 @@
+CREATE TABLE products (
+    id BIGSERIAL PRIMARY KEY,
+    name TEXT NOT NULL,
+    price NUMERIC(10, 2) NOT NULL,
+    tags TEXT[]
+);
diff --git a/internal/endtoend/testdata/skip_parser/postgresql/pgx/v5/sqlc.json b/internal/endtoend/testdata/skip_parser/postgresql/pgx/v5/sqlc.json
new file mode 100644
index 0000000000..445916f44f
--- /dev/null
+++ b/internal/endtoend/testdata/skip_parser/postgresql/pgx/v5/sqlc.json
@@ -0,0 +1,24 @@
+{
+  "version": "2",
+  "sql": [
+    {
+      "name": "querytest",
+      "engine": "postgresql",
+      "queries": "query.sql",
+      "schema": "schema.sql",
+      "database": {
+        "managed": true
+      },
+      "analyzer": {
+        "skip_parser": true
+      },
+      "gen": {
+        "go": {
+          "package": "querytest",
+          "sql_package": "pgx/v5",
+          "out": "go"
+        }
+      }
+    }
+  ]
+}
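The `analyzer` block in the config above maps onto the extended `config.Analyzer` struct from `internal/config/config.go`. The sketch below shows how the same JSON decodes; it uses a local copy of the struct (tags copied from the diff) so it runs outside the sqlc module.

```go
package main

import (
	"encoding/json"
	"fmt"
)

// Analyzer is a local copy of the struct from internal/config/config.go,
// reproduced here (tags included) purely for illustration.
type Analyzer struct {
	Database   *bool `json:"database" yaml:"database"`
	SkipParser *bool `json:"skip_parser" yaml:"skip_parser"`
}

func main() {
	var a Analyzer
	if err := json.Unmarshal([]byte(`{"skip_parser": true}`), &a); err != nil {
		panic(err)
	}
	// Unset and explicit false stay distinguishable because the fields are *bool.
	fmt.Println(a.SkipParser != nil && *a.SkipParser) // true
	fmt.Println(a.Database == nil)                    // true: database analyzer left at its default
}
```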
diff --git a/internal/endtoend/testdata/skip_parser_error_no_database/postgresql/query.sql b/internal/endtoend/testdata/skip_parser_error_no_database/postgresql/query.sql
new file mode 100644
index 0000000000..7f38820027
--- /dev/null
+++ b/internal/endtoend/testdata/skip_parser_error_no_database/postgresql/query.sql
@@ -0,0 +1,2 @@
+-- name: GetTest :one
+SELECT id FROM test WHERE id = $1;
diff --git a/internal/endtoend/testdata/skip_parser_error_no_database/postgresql/schema.sql b/internal/endtoend/testdata/skip_parser_error_no_database/postgresql/schema.sql
new file mode 100644
index 0000000000..48038dc8e8
--- /dev/null
+++ b/internal/endtoend/testdata/skip_parser_error_no_database/postgresql/schema.sql
@@ -0,0 +1 @@
+CREATE TABLE test (id INT);
diff --git a/internal/endtoend/testdata/skip_parser_error_no_database/postgresql/sqlc.json b/internal/endtoend/testdata/skip_parser_error_no_database/postgresql/sqlc.json
new file mode 100644
index 0000000000..82d1077270
--- /dev/null
+++ b/internal/endtoend/testdata/skip_parser_error_no_database/postgresql/sqlc.json
@@ -0,0 +1,20 @@
+{
+  "version": "2",
+  "sql": [
+    {
+      "name": "test",
+      "engine": "postgresql",
+      "queries": "query.sql",
+      "schema": "schema.sql",
+      "analyzer": {
+        "skip_parser": true
+      },
+      "gen": {
+        "go": {
+          "package": "test",
+          "out": "go"
+        }
+      }
+    }
+  ]
+}
diff --git a/internal/endtoend/testdata/skip_parser_error_no_database/postgresql/stderr.txt b/internal/endtoend/testdata/skip_parser_error_no_database/postgresql/stderr.txt
new file mode 100644
index 0000000000..594ae8755b
--- /dev/null
+++ b/internal/endtoend/testdata/skip_parser_error_no_database/postgresql/stderr.txt
@@ -0,0 +1 @@
+error creating compiler: skip_parser requires database configuration
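The `stderr.txt` fixture pins the exact error text produced when `skip_parser` is enabled without a `database` block. Outside sqlc's own end-to-end harness, the same expectation could be exercised by shelling out to `sqlc generate`; the test below is a hypothetical sketch that assumes a `sqlc` binary on `PATH` and the testdata layout shown above.

```go
package endtoend

import (
	"bytes"
	"os/exec"
	"strings"
	"testing"
)

// TestSkipParserErrorNoDatabase mirrors the stderr.txt expectation above.
func TestSkipParserErrorNoDatabase(t *testing.T) {
	cmd := exec.Command("sqlc", "generate")
	cmd.Dir = "testdata/skip_parser_error_no_database/postgresql"
	var stderr bytes.Buffer
	cmd.Stderr = &stderr

	// Generation must fail, and the message must contain the pinned error text.
	if err := cmd.Run(); err == nil {
		t.Fatal("expected sqlc generate to fail when skip_parser is set without a database block")
	}
	if !strings.Contains(stderr.String(), "skip_parser requires database configuration") {
		t.Fatalf("unexpected stderr: %q", stderr.String())
	}
}
```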