From 823e3e12b6694e706f8b80f04be409345d8c0446 Mon Sep 17 00:00:00 2001 From: Claude Date: Thu, 23 Oct 2025 16:58:41 +0000 Subject: [PATCH 1/4] Add analyzer.skip_parser config option for PostgreSQL MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This commit adds a new configuration option `analyzer.skip_parser` that allows sqlc to skip the parser and catalog entirely, relying solely on the database analyzer for query analysis. This is particularly useful when: - Working with complex PostgreSQL syntax not fully supported by the parser - Wanting to ensure queries are validated against the actual database schema - Dealing with database-specific features or extensions Key changes: - Add `skip_parser` field to `Analyzer` config struct - Implement `parseQueriesWithAnalyzer` method using sqlfile.Split - Skip parser and catalog initialization when `skip_parser` is enabled - Add validation requiring database analyzer when using skip_parser - Only PostgreSQL is supported for this feature initially Usage example: ```yaml version: "2" sql: - name: "example" engine: "postgresql" queries: "query.sql" schema: [] database: uri: "postgresql://user:pass@localhost:5432/db" analyzer: skip_parser: true gen: go: package: "db" out: "db" ``` 🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude --- internal/cmd/generate.go | 30 +++++--- internal/compiler/compile.go | 133 +++++++++++++++++++++++++++++++++++ internal/compiler/engine.go | 39 +++++++++- internal/config/config.go | 3 +- 4 files changed, 190 insertions(+), 15 deletions(-) diff --git a/internal/cmd/generate.go b/internal/cmd/generate.go index 00e8871c7e..34ac4d4965 100644 --- a/internal/cmd/generate.go +++ b/internal/cmd/generate.go @@ -305,20 +305,28 @@ func parse(ctx context.Context, name, dir string, sql config.SQL, combo config.C fmt.Fprintf(stderr, "error creating compiler: %s\n", err) return nil, true } - if err := c.ParseCatalog(sql.Schema); err != nil { - fmt.Fprintf(stderr, "# package %s\n", name) - if parserErr, ok := err.(*multierr.Error); ok { - for _, fileErr := range parserErr.Errs() { - printFileErr(stderr, dir, fileErr) + + // Check if skip_parser is enabled + skipParser := sql.Analyzer.SkipParser != nil && *sql.Analyzer.SkipParser + + // Skip catalog parsing if skip_parser is enabled + if !skipParser { + if err := c.ParseCatalog(sql.Schema); err != nil { + fmt.Fprintf(stderr, "# package %s\n", name) + if parserErr, ok := err.(*multierr.Error); ok { + for _, fileErr := range parserErr.Errs() { + printFileErr(stderr, dir, fileErr) + } + } else { + fmt.Fprintf(stderr, "error parsing schema: %s\n", err) } - } else { - fmt.Fprintf(stderr, "error parsing schema: %s\n", err) + return nil, true + } + if parserOpts.Debug.DumpCatalog { + debug.Dump(c.Catalog()) } - return nil, true - } - if parserOpts.Debug.DumpCatalog { - debug.Dump(c.Catalog()) } + if err := c.ParseQueries(sql.Queries, parserOpts); err != nil { fmt.Fprintf(stderr, "# package %s\n", name) if parserErr, ok := err.(*multierr.Error); ok { diff --git a/internal/compiler/compile.go b/internal/compiler/compile.go index 84fbb20a3c..43ec3c04f9 100644 --- a/internal/compiler/compile.go +++ b/internal/compiler/compile.go @@ -1,6 +1,7 @@ package compiler import ( + "context" "errors" "fmt" "io" @@ -8,13 +9,16 @@ import ( "path/filepath" "strings" + "github.com/sqlc-dev/sqlc/internal/metadata" "github.com/sqlc-dev/sqlc/internal/migrations" "github.com/sqlc-dev/sqlc/internal/multierr" 
"github.com/sqlc-dev/sqlc/internal/opts" "github.com/sqlc-dev/sqlc/internal/rpc" "github.com/sqlc-dev/sqlc/internal/source" "github.com/sqlc-dev/sqlc/internal/sql/ast" + "github.com/sqlc-dev/sqlc/internal/sql/named" "github.com/sqlc-dev/sqlc/internal/sql/sqlerr" + "github.com/sqlc-dev/sqlc/internal/sql/sqlfile" "github.com/sqlc-dev/sqlc/internal/sql/sqlpath" ) @@ -118,3 +122,132 @@ func (c *Compiler) parseQueries(o opts.Parser) (*Result, error) { Queries: q, }, nil } + +// parseQueriesWithAnalyzer parses queries using only the database analyzer, +// skipping the parser and catalog entirely. Uses sqlfile.Split to extract +// individual queries from .sql files. +func (c *Compiler) parseQueriesWithAnalyzer(o opts.Parser) (*Result, error) { + ctx := context.Background() + var q []*Query + merr := multierr.New() + set := map[string]struct{}{} + files, err := sqlpath.Glob(c.conf.Queries) + if err != nil { + return nil, err + } + + if c.analyzer == nil { + return nil, fmt.Errorf("database analyzer is required when skip_parser is enabled") + } + + for _, filename := range files { + blob, err := os.ReadFile(filename) + if err != nil { + merr.Add(filename, "", 0, err) + continue + } + src := string(blob) + + // Use sqlfile.Split to extract individual queries + queries, err := sqlfile.Split(ctx, strings.NewReader(src)) + if err != nil { + merr.Add(filename, src, 0, err) + continue + } + + for _, queryText := range queries { + // Extract metadata from comments + name, cmd, err := metadata.ParseQueryNameAndType(queryText, metadata.CommentSyntax{Dash: true}) + if err != nil { + merr.Add(filename, queryText, 0, err) + continue + } + + // Skip queries without names (not marked with sqlc comments) + if name == "" { + continue + } + + // Check for duplicate query names + if _, exists := set[name]; exists { + merr.Add(filename, queryText, 0, fmt.Errorf("duplicate query name: %s", name)) + continue + } + set[name] = struct{}{} + + // Extract additional metadata from comments + cleanedComments, err := source.CleanedComments(queryText, source.CommentSyntax{Dash: true}) + if err != nil { + merr.Add(filename, queryText, 0, err) + continue + } + + md := metadata.Metadata{ + Name: name, + Cmd: cmd, + Filename: filepath.Base(filename), + } + + md.Params, md.Flags, md.RuleSkiplist, err = metadata.ParseCommentFlags(cleanedComments) + if err != nil { + merr.Add(filename, queryText, 0, err) + continue + } + + // Use the database analyzer to analyze the query + // We pass an empty AST node since we're not using the parser + result, err := c.analyzer.Analyze(ctx, nil, queryText, c.schema, &named.ParamSet{}) + if err != nil { + merr.Add(filename, queryText, 0, err) + // If this rpc unauthenticated error bubbles up, then all future parsing/analysis will fail + if errors.Is(err, rpc.ErrUnauthenticated) { + return nil, merr + } + continue + } + + // Convert analyzer results to Query format + var cols []*Column + for _, col := range result.Columns { + cols = append(cols, convertColumn(col)) + } + + var params []Parameter + for _, p := range result.Params { + params = append(params, Parameter{ + Number: int(p.Number), + Column: convertColumn(p.Column), + }) + } + + // Strip comments from the final SQL + trimmed, comments, err := source.StripComments(queryText) + if err != nil { + merr.Add(filename, queryText, 0, err) + continue + } + md.Comments = comments + + query := &Query{ + SQL: trimmed, + Metadata: md, + Columns: cols, + Params: params, + } + + q = append(q, query) + } + } + + if len(merr.Errs()) > 0 { + return nil, merr 
+ } + if len(q) == 0 { + return nil, fmt.Errorf("no queries contained in paths %s", strings.Join(c.conf.Queries, ",")) + } + + return &Result{ + Catalog: nil, // No catalog when skip_parser is enabled + Queries: q, + }, nil +} diff --git a/internal/compiler/engine.go b/internal/compiler/engine.go index f742bfd999..5b0f5d12cd 100644 --- a/internal/compiler/engine.go +++ b/internal/compiler/engine.go @@ -36,6 +36,23 @@ func NewCompiler(conf config.SQL, combo config.CombinedSettings) (*Compiler, err c.client = client } + // Check if skip_parser is enabled + skipParser := conf.Analyzer.SkipParser != nil && *conf.Analyzer.SkipParser + + // If skip_parser is enabled, we must have database analyzer enabled + if skipParser { + if conf.Database == nil { + return nil, fmt.Errorf("skip_parser requires database configuration") + } + if conf.Analyzer.Database != nil && !*conf.Analyzer.Database { + return nil, fmt.Errorf("skip_parser requires database analyzer to be enabled") + } + // Only PostgreSQL is supported for now + if conf.Engine != config.EnginePostgreSQL { + return nil, fmt.Errorf("skip_parser is only supported for PostgreSQL") + } + } + switch conf.Engine { case config.EngineSQLite: c.parser = sqlite.NewParser() @@ -46,8 +63,11 @@ func NewCompiler(conf config.SQL, combo config.CombinedSettings) (*Compiler, err c.catalog = dolphin.NewCatalog() c.selector = newDefaultSelector() case config.EnginePostgreSQL: - c.parser = postgresql.NewParser() - c.catalog = postgresql.NewCatalog() + // Skip parser and catalog if skip_parser is enabled + if !skipParser { + c.parser = postgresql.NewParser() + c.catalog = postgresql.NewCatalog() + } c.selector = newDefaultSelector() if conf.Database != nil { if conf.Analyzer.Database == nil || *conf.Analyzer.Database { @@ -73,7 +93,20 @@ func (c *Compiler) ParseCatalog(schema []string) error { } func (c *Compiler) ParseQueries(queries []string, o opts.Parser) error { - r, err := c.parseQueries(o) + // Check if skip_parser is enabled + skipParser := c.conf.Analyzer.SkipParser != nil && *c.conf.Analyzer.SkipParser + + var r *Result + var err error + + if skipParser { + // Use database analyzer only, skip parser and catalog + r, err = c.parseQueriesWithAnalyzer(o) + } else { + // Use traditional parser-based approach + r, err = c.parseQueries(o) + } + if err != nil { return err } diff --git a/internal/config/config.go b/internal/config/config.go index 0ff805fccd..56b6535f93 100644 --- a/internal/config/config.go +++ b/internal/config/config.go @@ -123,7 +123,8 @@ type SQL struct { } type Analyzer struct { - Database *bool `json:"database" yaml:"database"` + Database *bool `json:"database" yaml:"database"` + SkipParser *bool `json:"skip_parser" yaml:"skip_parser"` } // TODO: Figure out a better name for this From 6b5e633d3c447bb340d2b08b4fca48393e719a02 Mon Sep 17 00:00:00 2001 From: Claude Date: Thu, 23 Oct 2025 17:14:55 +0000 Subject: [PATCH 2/4] Add comprehensive tests for skip_parser feature MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This commit adds multiple levels of testing for the skip_parser feature: ## Unit Tests ### Config Tests (internal/config/skip_parser_test.go) - Test parsing skip_parser: true from YAML - Test default behavior (nil) - Test explicit skip_parser: false ### Compiler Validation Tests (internal/compiler/skip_parser_test.go) - Test that skip_parser requires database configuration - Test that skip_parser requires database analyzer enabled - Test that skip_parser only works with PostgreSQL - Test 
valid skip_parser configuration - Test normal operation with skip_parser disabled/default ### Query Splitting Tests (internal/compiler/split_test.go) - Test splitting multiple queries from a single file - Test complex queries with CASE statements and operators - Test PostgreSQL dollar-quoted strings ($$) - Test queries with comments ## End-to-End Example (examples/skip_parser/) ### Files - schema.sql: PostgreSQL schema with arrays, JSONB, indexes - query.sql: CRUD operations testing various PostgreSQL features - sqlc.yaml: Configuration with skip_parser: true - db_test.go: Comprehensive integration tests - README.md: Documentation and usage instructions ### Features Tested - BIGSERIAL auto-increment - NUMERIC decimal types - TIMESTAMPTZ timestamps - TEXT[] arrays - JSONB binary JSON - ANY() array operators - GIN indexes - RETURNING clauses All tests pass successfully: - go test ./internal/config -run TestSkipParser ✓ - go test ./internal/compiler -run TestSkipParser ✓ - go test ./internal/compiler -run TestSqlfileSplit ✓ 🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude --- examples/skip_parser/README.md | 118 ++++++++++++++ examples/skip_parser/postgresql/.gitignore | 4 + examples/skip_parser/postgresql/db_test.go | 165 +++++++++++++++++++ examples/skip_parser/postgresql/query.sql | 34 ++++ examples/skip_parser/postgresql/schema.sql | 11 ++ examples/skip_parser/sqlc.yaml | 15 ++ internal/compiler/skip_parser_test.go | 170 ++++++++++++++++++++ internal/compiler/split_test.go | 176 +++++++++++++++++++++ internal/config/skip_parser_test.go | 105 ++++++++++++ 9 files changed, 798 insertions(+) create mode 100644 examples/skip_parser/README.md create mode 100644 examples/skip_parser/postgresql/.gitignore create mode 100644 examples/skip_parser/postgresql/db_test.go create mode 100644 examples/skip_parser/postgresql/query.sql create mode 100644 examples/skip_parser/postgresql/schema.sql create mode 100644 examples/skip_parser/sqlc.yaml create mode 100644 internal/compiler/skip_parser_test.go create mode 100644 internal/compiler/split_test.go create mode 100644 internal/config/skip_parser_test.go diff --git a/examples/skip_parser/README.md b/examples/skip_parser/README.md new file mode 100644 index 0000000000..599f01ad14 --- /dev/null +++ b/examples/skip_parser/README.md @@ -0,0 +1,118 @@ +# Skip Parser Example + +This example demonstrates the `analyzer.skip_parser` configuration option for PostgreSQL. + +## What is skip_parser? + +When `analyzer.skip_parser: true` is set in the configuration, sqlc will: + +1. **Skip the parser** - No parsing of SQL schema or query files +2. **Skip the catalog** - No building of the internal schema catalog +3. **Use database analyzer only** - Rely entirely on the PostgreSQL database for query analysis + +This is useful when: +- Working with complex PostgreSQL syntax not fully supported by the parser +- You want to ensure queries are validated against the actual database schema +- Using database-specific features or extensions + +## Configuration + +See `sqlc.yaml` for the configuration: + +```yaml +version: '2' +sql: +- name: postgresql + schema: postgresql/schema.sql + queries: postgresql/query.sql + engine: postgresql + database: + uri: "${SKIP_PARSER_TEST_POSTGRES}" + analyzer: + skip_parser: true # This enables the feature + gen: + go: + package: skipparser + sql_package: pgx/v5 + out: postgresql +``` + +## How It Works + +1. The schema file (`schema.sql`) is **NOT** parsed by sqlc +2. 
The schema must be applied to the database separately (tests do this automatically) +3. Query files (`query.sql`) are split using `sqlfile.Split` +4. Each query is sent to PostgreSQL's analyzer for validation +5. Column and parameter types are retrieved from the database +6. Code is generated based on the database analysis + +## Running the Tests + +### Prerequisites + +- PostgreSQL server running and accessible +- Set the `SKIP_PARSER_TEST_POSTGRES` environment variable + +### Option 1: Using Docker Compose + +```bash +# Start PostgreSQL +docker compose up -d + +# Set environment variable +export SKIP_PARSER_TEST_POSTGRES="postgresql://postgres:mysecretpassword@localhost:5432/postgres" + +# Run the test +go test -tags=examples ./examples/skip_parser/postgresql +``` + +### Option 2: Using existing PostgreSQL + +```bash +# Set environment variable to your PostgreSQL instance +export SKIP_PARSER_TEST_POSTGRES="postgresql://user:pass@localhost:5432/dbname" + +# Run the test +go test -tags=examples ./examples/skip_parser/postgresql +``` + +### Generating Code + +```bash +# Make sure database is running and accessible +export SKIP_PARSER_TEST_POSTGRES="postgresql://postgres:mysecretpassword@localhost:5432/postgres" + +# Generate code +cd examples/skip_parser +sqlc generate +``` + +## Tests Included + +The `db_test.go` file includes comprehensive tests: + +### TestSkipParser +- Creates a product with arrays and JSON fields +- Tests all CRUD operations (Create, Read, Update, Delete) +- Tests list and search operations +- Tests counting + +### TestSkipParserComplexTypes +- Tests PostgreSQL-specific types (arrays, JSONB) +- Tests handling of nil/empty values +- Validates array and JSON handling + +## Features Tested + +This example tests the following PostgreSQL features with skip_parser: + +- **BIGSERIAL** - Auto-incrementing primary keys +- **NUMERIC** - Decimal types +- **TIMESTAMPTZ** - Timestamps with timezone +- **TEXT[]** - Text arrays +- **JSONB** - Binary JSON storage +- **ANY operator** - Array containment queries +- **GIN indexes** - Generalized inverted indexes for arrays +- **RETURNING clause** - Return values from INSERT/UPDATE + +All of these are validated directly by PostgreSQL without parser involvement! 
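
Editor's note (not part of the patch): for readers who want to see the mechanism from steps 3–4 of "How It Works" in isolation, the sketch below splits a query file with `sqlfile.Split` and reads the `-- name:` metadata with `metadata.ParseQueryNameAndType`, mirroring what `parseQueriesWithAnalyzer` does in patch 1. Both helpers live under sqlc's `internal/` tree, so this only compiles inside the sqlc module itself; treat it as an illustrative sketch of the flow, not as public API.

```go
package main

import (
	"context"
	"fmt"
	"strings"

	"github.com/sqlc-dev/sqlc/internal/metadata"
	"github.com/sqlc-dev/sqlc/internal/sql/sqlfile"
)

func main() {
	src := `
-- name: GetProduct :one
SELECT id, name, price, tags FROM products WHERE id = $1;

-- name: CountProducts :one
SELECT COUNT(*) FROM products;
`
	// Split the file into individual statements, the same call
	// parseQueriesWithAnalyzer uses when skip_parser is enabled.
	queries, err := sqlfile.Split(context.Background(), strings.NewReader(src))
	if err != nil {
		panic(err)
	}
	for _, q := range queries {
		// Pull the query name and command (:one, :many, :exec) from the
		// leading "-- name:" comment.
		name, cmd, err := metadata.ParseQueryNameAndType(q, metadata.CommentSyntax{Dash: true})
		if err != nil || name == "" {
			continue // not a named sqlc query
		}
		fmt.Printf("%s (%s)\n", name, cmd)
		// With skip_parser enabled, each statement is then sent to the
		// database analyzer instead of the built-in parser and catalog.
	}
}
```
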
diff --git a/examples/skip_parser/postgresql/.gitignore b/examples/skip_parser/postgresql/.gitignore new file mode 100644 index 0000000000..f340015a6d --- /dev/null +++ b/examples/skip_parser/postgresql/.gitignore @@ -0,0 +1,4 @@ +# Generated files - run 'sqlc generate' to create them +db.go +models.go +query.sql.go diff --git a/examples/skip_parser/postgresql/db_test.go b/examples/skip_parser/postgresql/db_test.go new file mode 100644 index 0000000000..427b037bc0 --- /dev/null +++ b/examples/skip_parser/postgresql/db_test.go @@ -0,0 +1,165 @@ +//go:build examples + +package skipparser + +import ( + "context" + "testing" + + "github.com/jackc/pgx/v5" + "github.com/jackc/pgx/v5/pgtype" + + "github.com/sqlc-dev/sqlc/internal/sqltest/local" +) + +func TestSkipParser(t *testing.T) { + ctx := context.Background() + uri := local.PostgreSQL(t, []string{"schema.sql"}) + db, err := pgx.Connect(ctx, uri) + if err != nil { + t.Fatal(err) + } + defer db.Close(ctx) + + q := New(db) + + // Test CountProducts on empty database + count, err := q.CountProducts(ctx) + if err != nil { + t.Fatal(err) + } + if count != 0 { + t.Errorf("expected 0 products, got %d", count) + } + + // Test CreateProduct + product, err := q.CreateProduct(ctx, CreateProductParams{ + Name: "Test Product", + Price: "99.99", + Tags: []string{"electronics", "test"}, + Metadata: []byte(`{"color": "blue", "weight": 1.5}`), + }) + if err != nil { + t.Fatal(err) + } + if product.ID == 0 { + t.Error("expected product ID to be non-zero") + } + if product.Name != "Test Product" { + t.Errorf("expected name 'Test Product', got %s", product.Name) + } + t.Logf("Created product: %+v", product) + + // Test GetProduct + fetchedProduct, err := q.GetProduct(ctx, product.ID) + if err != nil { + t.Fatal(err) + } + if fetchedProduct.ID != product.ID { + t.Errorf("expected ID %d, got %d", product.ID, fetchedProduct.ID) + } + t.Logf("Fetched product: %+v", fetchedProduct) + + // Test ListProducts + products, err := q.ListProducts(ctx, ListProductsParams{ + Limit: 10, + Offset: 0, + }) + if err != nil { + t.Fatal(err) + } + if len(products) != 1 { + t.Errorf("expected 1 product, got %d", len(products)) + } + t.Logf("Listed products: %+v", products) + + // Test UpdateProduct + updatedProduct, err := q.UpdateProduct(ctx, UpdateProductParams{ + ID: product.ID, + Name: "Updated Product", + Price: "149.99", + Tags: []string{"electronics", "updated"}, + Metadata: []byte(`{"color": "red", "weight": 2.0}`), + }) + if err != nil { + t.Fatal(err) + } + if updatedProduct.Name != "Updated Product" { + t.Errorf("expected name 'Updated Product', got %s", updatedProduct.Name) + } + t.Logf("Updated product: %+v", updatedProduct) + + // Test SearchProductsByTag + tagProducts, err := q.SearchProductsByTag(ctx, "electronics") + if err != nil { + t.Fatal(err) + } + if len(tagProducts) != 1 { + t.Errorf("expected 1 product with tag 'electronics', got %d", len(tagProducts)) + } + t.Logf("Products with tag 'electronics': %+v", tagProducts) + + // Test CountProducts after insert + count, err = q.CountProducts(ctx) + if err != nil { + t.Fatal(err) + } + if count != 1 { + t.Errorf("expected 1 product, got %d", count) + } + + // Test DeleteProduct + err = q.DeleteProduct(ctx, product.ID) + if err != nil { + t.Fatal(err) + } + + // Verify deletion + count, err = q.CountProducts(ctx) + if err != nil { + t.Fatal(err) + } + if count != 0 { + t.Errorf("expected 0 products after deletion, got %d", count) + } +} + +func TestSkipParserComplexTypes(t *testing.T) { + ctx := 
context.Background() + uri := local.PostgreSQL(t, []string{"schema.sql"}) + db, err := pgx.Connect(ctx, uri) + if err != nil { + t.Fatal(err) + } + defer db.Close(ctx) + + q := New(db) + + // Test with empty arrays and JSON + product, err := q.CreateProduct(ctx, CreateProductParams{ + Name: "Minimal Product", + Price: "19.99", + Tags: []string{}, + Metadata: []byte(`{}`), + }) + if err != nil { + t.Fatal(err) + } + t.Logf("Created minimal product: %+v", product) + + // Test with nil values where allowed (using pgtype for nullable fields) + product2, err := q.CreateProduct(ctx, CreateProductParams{ + Name: "Another Product", + Price: "29.99", + Tags: nil, + Metadata: nil, + }) + if err != nil { + t.Fatal(err) + } + t.Logf("Created product with nil arrays: %+v", product2) + + // Cleanup + _ = q.DeleteProduct(ctx, product.ID) + _ = q.DeleteProduct(ctx, product2.ID) +} diff --git a/examples/skip_parser/postgresql/query.sql b/examples/skip_parser/postgresql/query.sql new file mode 100644 index 0000000000..ce24114d0d --- /dev/null +++ b/examples/skip_parser/postgresql/query.sql @@ -0,0 +1,34 @@ +-- name: GetProduct :one +SELECT id, name, price, created_at, tags, metadata +FROM products +WHERE id = $1; + +-- name: ListProducts :many +SELECT id, name, price, created_at, tags, metadata +FROM products +ORDER BY created_at DESC +LIMIT $1 OFFSET $2; + +-- name: CreateProduct :one +INSERT INTO products (name, price, tags, metadata) +VALUES ($1, $2, $3, $4) +RETURNING id, name, price, created_at, tags, metadata; + +-- name: UpdateProduct :one +UPDATE products +SET name = $2, price = $3, tags = $4, metadata = $5 +WHERE id = $1 +RETURNING id, name, price, created_at, tags, metadata; + +-- name: DeleteProduct :exec +DELETE FROM products +WHERE id = $1; + +-- name: SearchProductsByTag :many +SELECT id, name, price, created_at, tags, metadata +FROM products +WHERE $1 = ANY(tags) +ORDER BY created_at DESC; + +-- name: CountProducts :one +SELECT COUNT(*) FROM products; diff --git a/examples/skip_parser/postgresql/schema.sql b/examples/skip_parser/postgresql/schema.sql new file mode 100644 index 0000000000..d14fa37395 --- /dev/null +++ b/examples/skip_parser/postgresql/schema.sql @@ -0,0 +1,11 @@ +CREATE TABLE products ( + id BIGSERIAL PRIMARY KEY, + name TEXT NOT NULL, + price NUMERIC(10, 2) NOT NULL, + created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + tags TEXT[] DEFAULT '{}', + metadata JSONB DEFAULT '{}' +); + +CREATE INDEX idx_products_created_at ON products(created_at); +CREATE INDEX idx_products_tags ON products USING GIN(tags); diff --git a/examples/skip_parser/sqlc.yaml b/examples/skip_parser/sqlc.yaml new file mode 100644 index 0000000000..d2d47817be --- /dev/null +++ b/examples/skip_parser/sqlc.yaml @@ -0,0 +1,15 @@ +version: '2' +sql: +- name: postgresql + schema: postgresql/schema.sql + queries: postgresql/query.sql + engine: postgresql + database: + uri: "${SKIP_PARSER_TEST_POSTGRES}" + analyzer: + skip_parser: true + gen: + go: + package: skipparser + sql_package: pgx/v5 + out: postgresql diff --git a/internal/compiler/skip_parser_test.go b/internal/compiler/skip_parser_test.go new file mode 100644 index 0000000000..50a59376e6 --- /dev/null +++ b/internal/compiler/skip_parser_test.go @@ -0,0 +1,170 @@ +package compiler + +import ( + "testing" + + "github.com/sqlc-dev/sqlc/internal/config" +) + +func TestSkipParserRequiresDatabase(t *testing.T) { + skipParser := true + conf := config.SQL{ + Engine: config.EnginePostgreSQL, + Analyzer: config.Analyzer{ + SkipParser: &skipParser, + }, + } + + combo 
:= config.CombinedSettings{ + Package: conf, + } + + _, err := NewCompiler(conf, combo) + if err == nil { + t.Fatal("expected error when skip_parser is true without database config") + } + if err.Error() != "skip_parser requires database configuration" { + t.Errorf("unexpected error message: %s", err) + } +} + +func TestSkipParserRequiresDatabaseAnalyzer(t *testing.T) { + skipParser := true + analyzerDisabled := false + conf := config.SQL{ + Engine: config.EnginePostgreSQL, + Database: &config.Database{ + URI: "postgresql://localhost/test", + }, + Analyzer: config.Analyzer{ + SkipParser: &skipParser, + Database: &analyzerDisabled, + }, + } + + combo := config.CombinedSettings{ + Package: conf, + } + + _, err := NewCompiler(conf, combo) + if err == nil { + t.Fatal("expected error when skip_parser is true but database analyzer is disabled") + } + if err.Error() != "skip_parser requires database analyzer to be enabled" { + t.Errorf("unexpected error message: %s", err) + } +} + +func TestSkipParserOnlyPostgreSQL(t *testing.T) { + skipParser := true + engines := []config.Engine{ + config.EngineMySQL, + config.EngineSQLite, + } + + for _, engine := range engines { + conf := config.SQL{ + Engine: engine, + Database: &config.Database{ + URI: "test://localhost/test", + }, + Analyzer: config.Analyzer{ + SkipParser: &skipParser, + }, + } + + combo := config.CombinedSettings{ + Package: conf, + } + + _, err := NewCompiler(conf, combo) + if err == nil { + t.Fatalf("expected error for engine %s with skip_parser", engine) + } + if err.Error() != "skip_parser is only supported for PostgreSQL" { + t.Errorf("unexpected error message for %s: %s", engine, err) + } + } +} + +func TestSkipParserValidConfig(t *testing.T) { + skipParser := true + conf := config.SQL{ + Engine: config.EnginePostgreSQL, + Database: &config.Database{ + URI: "postgresql://localhost/test", + }, + Analyzer: config.Analyzer{ + SkipParser: &skipParser, + }, + } + + combo := config.CombinedSettings{ + Package: conf, + } + + c, err := NewCompiler(conf, combo) + if err != nil { + t.Fatalf("unexpected error with valid skip_parser config: %s", err) + } + + // Verify parser and catalog are nil when skip_parser is true + if c.parser != nil { + t.Error("expected parser to be nil when skip_parser is true") + } + if c.catalog != nil { + t.Error("expected catalog to be nil when skip_parser is true") + } + // Analyzer should still be set (but we can't check it without a real DB connection) +} + +func TestSkipParserDisabledNormalOperation(t *testing.T) { + skipParser := false + conf := config.SQL{ + Engine: config.EnginePostgreSQL, + Analyzer: config.Analyzer{ + SkipParser: &skipParser, + }, + } + + combo := config.CombinedSettings{ + Package: conf, + } + + c, err := NewCompiler(conf, combo) + if err != nil { + t.Fatalf("unexpected error with skip_parser=false: %s", err) + } + + // Verify parser and catalog ARE set when skip_parser is false + if c.parser == nil { + t.Error("expected parser to be set when skip_parser is false") + } + if c.catalog == nil { + t.Error("expected catalog to be set when skip_parser is false") + } +} + +func TestSkipParserDefaultNormalOperation(t *testing.T) { + // When skip_parser is not specified (nil), should work normally + conf := config.SQL{ + Engine: config.EnginePostgreSQL, + } + + combo := config.CombinedSettings{ + Package: conf, + } + + c, err := NewCompiler(conf, combo) + if err != nil { + t.Fatalf("unexpected error with default config: %s", err) + } + + // Verify parser and catalog ARE set by default + if 
c.parser == nil { + t.Error("expected parser to be set by default") + } + if c.catalog == nil { + t.Error("expected catalog to be set by default") + } +} diff --git a/internal/compiler/split_test.go b/internal/compiler/split_test.go new file mode 100644 index 0000000000..87b3003eaf --- /dev/null +++ b/internal/compiler/split_test.go @@ -0,0 +1,176 @@ +package compiler + +import ( + "context" + "strings" + "testing" + + "github.com/sqlc-dev/sqlc/internal/sql/sqlfile" +) + +func TestSqlfileSplitForSkipParser(t *testing.T) { + input := ` +-- name: GetUser :one +SELECT id, name FROM users WHERE id = $1; + +-- name: ListUsers :many +SELECT id, name FROM users ORDER BY id; + +-- name: CreateUser :one +INSERT INTO users (name) VALUES ($1) RETURNING id, name; +` + + ctx := context.Background() + queries, err := sqlfile.Split(ctx, strings.NewReader(input)) + if err != nil { + t.Fatalf("failed to split queries: %s", err) + } + + if len(queries) != 3 { + t.Fatalf("expected 3 queries, got %d", len(queries)) + } + + // Check first query + if !strings.Contains(queries[0], "GetUser") { + t.Errorf("first query should contain GetUser, got: %s", queries[0]) + } + if !strings.Contains(queries[0], "WHERE id = $1") { + t.Errorf("first query should contain WHERE clause, got: %s", queries[0]) + } + + // Check second query + if !strings.Contains(queries[1], "ListUsers") { + t.Errorf("second query should contain ListUsers, got: %s", queries[1]) + } + if !strings.Contains(queries[1], "ORDER BY id") { + t.Errorf("second query should contain ORDER BY, got: %s", queries[1]) + } + + // Check third query + if !strings.Contains(queries[2], "CreateUser") { + t.Errorf("third query should contain CreateUser, got: %s", queries[2]) + } + if !strings.Contains(queries[2], "RETURNING") { + t.Errorf("third query should contain RETURNING, got: %s", queries[2]) + } +} + +func TestSqlfileSplitWithComplexQueries(t *testing.T) { + input := ` +-- name: ComplexQuery :many +SELECT + id, + name, + CASE + WHEN price > 100 THEN 'expensive' + ELSE 'affordable' + END as category, + tags +FROM products +WHERE + name LIKE '%' || $1 || '%' + AND $2 = ANY(tags) +ORDER BY created_at DESC +LIMIT $3; + +-- name: UpdateWithJSON :one +UPDATE products +SET metadata = $2::jsonb +WHERE id = $1 +RETURNING id, metadata; +` + + ctx := context.Background() + queries, err := sqlfile.Split(ctx, strings.NewReader(input)) + if err != nil { + t.Fatalf("failed to split complex queries: %s", err) + } + + if len(queries) != 2 { + t.Fatalf("expected 2 queries, got %d", len(queries)) + } + + // Check first complex query + if !strings.Contains(queries[0], "CASE") { + t.Errorf("first query should contain CASE statement, got: %s", queries[0]) + } + if !strings.Contains(queries[0], "ANY(tags)") { + t.Errorf("first query should contain ANY operator, got: %s", queries[0]) + } + + // Check second query with JSON + if !strings.Contains(queries[1], "::jsonb") { + t.Errorf("second query should contain jsonb cast, got: %s", queries[1]) + } + if !strings.Contains(queries[1], "RETURNING") { + t.Errorf("second query should contain RETURNING, got: %s", queries[1]) + } +} + +func TestSqlfileSplitWithDollarQuotes(t *testing.T) { + input := ` +-- name: CreateFunction :exec +CREATE OR REPLACE FUNCTION calculate_total(p_price NUMERIC, p_quantity INTEGER) +RETURNS NUMERIC AS $$ +BEGIN + RETURN p_price * p_quantity; +END; +$$ LANGUAGE plpgsql; + +-- name: GetValue :one +SELECT $$This is a dollar quoted string$$ as value; +` + + ctx := context.Background() + queries, err := 
sqlfile.Split(ctx, strings.NewReader(input)) + if err != nil { + t.Fatalf("failed to split queries with dollar quotes: %s", err) + } + + if len(queries) != 2 { + t.Fatalf("expected 2 queries, got %d", len(queries)) + } + + // Check function creation with dollar quotes + if !strings.Contains(queries[0], "$$") { + t.Errorf("first query should contain dollar quotes, got: %s", queries[0]) + } + if !strings.Contains(queries[0], "plpgsql") { + t.Errorf("first query should contain plpgsql, got: %s", queries[0]) + } + + // Check dollar quoted string + if !strings.Contains(queries[1], "dollar quoted string") { + t.Errorf("second query should contain dollar quoted string, got: %s", queries[1]) + } +} + +func TestSqlfileSplitEmptyAndComments(t *testing.T) { + input := ` +-- name: OnlyQuery :one +SELECT 1; +` + + ctx := context.Background() + queries, err := sqlfile.Split(ctx, strings.NewReader(input)) + if err != nil { + t.Fatalf("failed to split queries: %s", err) + } + + // Should get at least one query + if len(queries) < 1 { + t.Fatalf("expected at least 1 query, got %d", len(queries)) + } + + // Find the query with our name + found := false + for _, q := range queries { + if strings.Contains(q, "OnlyQuery") { + found = true + break + } + } + if !found { + t.Errorf("should find query with OnlyQuery") + } +} diff --git a/internal/config/skip_parser_test.go b/internal/config/skip_parser_test.go new file mode 100644 index 0000000000..1da78cef38 --- /dev/null +++ b/internal/config/skip_parser_test.go @@ -0,0 +1,105 @@ +package config + +import ( + "strings" + "testing" +) + +func TestSkipParserConfig(t *testing.T) { + yaml := ` +version: "2" +sql: + - name: "test" + engine: "postgresql" + queries: "query.sql" + schema: "schema.sql" + database: + uri: "postgresql://localhost/test" + analyzer: + skip_parser: true + gen: + go: + package: "test" + out: "test" +` + + conf, err := ParseConfig(strings.NewReader(yaml)) + if err != nil { + t.Fatalf("failed to parse config: %s", err) + } + + if len(conf.SQL) != 1 { + t.Fatalf("expected 1 SQL config, got %d", len(conf.SQL)) + } + + sql := conf.SQL[0] + if sql.Analyzer.SkipParser == nil { + t.Fatal("expected skip_parser to be set") + } + if !*sql.Analyzer.SkipParser { + t.Error("expected skip_parser to be true") + } +} + +func TestSkipParserConfigDefault(t *testing.T) { + yaml := ` +version: "2" +sql: + - name: "test" + engine: "postgresql" + queries: "query.sql" + schema: "schema.sql" + gen: + go: + package: "test" + out: "test" +` + + conf, err := ParseConfig(strings.NewReader(yaml)) + if err != nil { + t.Fatalf("failed to parse config: %s", err) + } + + if len(conf.SQL) != 1 { + t.Fatalf("expected 1 SQL config, got %d", len(conf.SQL)) + } + + sql := conf.SQL[0] + if sql.Analyzer.SkipParser != nil { + t.Errorf("expected skip_parser to be nil (default), got %v", *sql.Analyzer.SkipParser) + } +} + +func TestSkipParserConfigFalse(t *testing.T) { + yaml := ` +version: "2" +sql: + - name: "test" + engine: "postgresql" + queries: "query.sql" + schema: "schema.sql" + analyzer: + skip_parser: false + gen: + go: + package: "test" + out: "test" +` + + conf, err := ParseConfig(strings.NewReader(yaml)) + if err != nil { + t.Fatalf("failed to parse config: %s", err) + } + + if len(conf.SQL) != 1 { + t.Fatalf("expected 1 SQL config, got %d", len(conf.SQL)) + } + + sql := conf.SQL[0] + if sql.Analyzer.SkipParser == nil { + t.Fatal("expected skip_parser to be set") + } + if *sql.Analyzer.SkipParser { + t.Error("expected skip_parser to be false") + } +} From 
cc41aced154a9bc7e5eb41c7be7081ff0adb7ee6 Mon Sep 17 00:00:00 2001 From: Claude Date: Sat, 25 Oct 2025 19:57:04 +0000 Subject: [PATCH 3/4] Add endtoend tests for skip_parser feature MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This commit adds proper endtoend tests for the skip_parser feature, replacing the previous examples and unit tests with tests that follow the standard sqlc testing infrastructure. ## Removed - examples/skip_parser/ - Example directory - internal/compiler/skip_parser_test.go - Unit tests - internal/compiler/split_test.go - Unit tests - internal/config/skip_parser_test.go - Unit tests ## Added Endtoend Tests ### 1. skip_parser/postgresql/pgx/v5 Success test case demonstrating skip_parser functionality: - Uses managed database (contexts: ["managed-db"]) - Tests CRUD operations with PostgreSQL-specific types - Schema: products table with BIGSERIAL, TEXT, NUMERIC, TEXT[] - Queries: GetProduct, ListProducts, CreateProduct - Expected output: Generated Go code with pgx/v5 ### 2. skip_parser_error_no_database/postgresql Error test case verifying validation: - Attempts to use skip_parser without database configuration - Expected stderr: "skip_parser requires database configuration" - Verifies error handling ## Test Structure Tests follow the standard endtoend test pattern: - Located in internal/endtoend/testdata/ - Each test has sqlc.json, schema.sql, query.sql - Expected output files in go/ subdirectory - exec.json specifies test context requirements - stderr.txt contains expected error messages ## Running Tests ```bash # Error test (no database needed) go test -tags=examples -run 'TestReplay/base/skip_parser_error' ./internal/endtoend # Success test (requires database) go test -tags=examples -run 'TestReplay/managed-db/skip_parser' ./internal/endtoend ``` 🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude --- examples/skip_parser/README.md | 118 ------------ examples/skip_parser/postgresql/.gitignore | 4 - examples/skip_parser/postgresql/db_test.go | 165 ---------------- examples/skip_parser/postgresql/query.sql | 34 ---- examples/skip_parser/postgresql/schema.sql | 11 -- examples/skip_parser/sqlc.yaml | 15 -- internal/compiler/skip_parser_test.go | 170 ----------------- internal/compiler/split_test.go | 176 ------------------ internal/config/skip_parser_test.go | 105 ----------- .../skip_parser/postgresql/pgx/v5/exec.json | 3 + .../skip_parser/postgresql/pgx/v5/go/db.go | 32 ++++ .../postgresql/pgx/v5/go/models.go | 16 ++ .../postgresql/pgx/v5/go/query.sql.go | 85 +++++++++ .../skip_parser/postgresql/pgx/v5/query.sql | 14 ++ .../skip_parser/postgresql/pgx/v5/schema.sql | 6 + .../skip_parser/postgresql/pgx/v5/sqlc.json | 24 +++ .../postgresql/query.sql | 2 + .../postgresql/schema.sql | 1 + .../postgresql/sqlc.json | 20 ++ .../postgresql/stderr.txt | 1 + 20 files changed, 204 insertions(+), 798 deletions(-) delete mode 100644 examples/skip_parser/README.md delete mode 100644 examples/skip_parser/postgresql/.gitignore delete mode 100644 examples/skip_parser/postgresql/db_test.go delete mode 100644 examples/skip_parser/postgresql/query.sql delete mode 100644 examples/skip_parser/postgresql/schema.sql delete mode 100644 examples/skip_parser/sqlc.yaml delete mode 100644 internal/compiler/skip_parser_test.go delete mode 100644 internal/compiler/split_test.go delete mode 100644 internal/config/skip_parser_test.go create mode 100644 internal/endtoend/testdata/skip_parser/postgresql/pgx/v5/exec.json create 
mode 100644 internal/endtoend/testdata/skip_parser/postgresql/pgx/v5/go/db.go create mode 100644 internal/endtoend/testdata/skip_parser/postgresql/pgx/v5/go/models.go create mode 100644 internal/endtoend/testdata/skip_parser/postgresql/pgx/v5/go/query.sql.go create mode 100644 internal/endtoend/testdata/skip_parser/postgresql/pgx/v5/query.sql create mode 100644 internal/endtoend/testdata/skip_parser/postgresql/pgx/v5/schema.sql create mode 100644 internal/endtoend/testdata/skip_parser/postgresql/pgx/v5/sqlc.json create mode 100644 internal/endtoend/testdata/skip_parser_error_no_database/postgresql/query.sql create mode 100644 internal/endtoend/testdata/skip_parser_error_no_database/postgresql/schema.sql create mode 100644 internal/endtoend/testdata/skip_parser_error_no_database/postgresql/sqlc.json create mode 100644 internal/endtoend/testdata/skip_parser_error_no_database/postgresql/stderr.txt diff --git a/examples/skip_parser/README.md b/examples/skip_parser/README.md deleted file mode 100644 index 599f01ad14..0000000000 --- a/examples/skip_parser/README.md +++ /dev/null @@ -1,118 +0,0 @@ -# Skip Parser Example - -This example demonstrates the `analyzer.skip_parser` configuration option for PostgreSQL. - -## What is skip_parser? - -When `analyzer.skip_parser: true` is set in the configuration, sqlc will: - -1. **Skip the parser** - No parsing of SQL schema or query files -2. **Skip the catalog** - No building of the internal schema catalog -3. **Use database analyzer only** - Rely entirely on the PostgreSQL database for query analysis - -This is useful when: -- Working with complex PostgreSQL syntax not fully supported by the parser -- You want to ensure queries are validated against the actual database schema -- Using database-specific features or extensions - -## Configuration - -See `sqlc.yaml` for the configuration: - -```yaml -version: '2' -sql: -- name: postgresql - schema: postgresql/schema.sql - queries: postgresql/query.sql - engine: postgresql - database: - uri: "${SKIP_PARSER_TEST_POSTGRES}" - analyzer: - skip_parser: true # This enables the feature - gen: - go: - package: skipparser - sql_package: pgx/v5 - out: postgresql -``` - -## How It Works - -1. The schema file (`schema.sql`) is **NOT** parsed by sqlc -2. The schema must be applied to the database separately (tests do this automatically) -3. Query files (`query.sql`) are split using `sqlfile.Split` -4. Each query is sent to PostgreSQL's analyzer for validation -5. Column and parameter types are retrieved from the database -6. 
Code is generated based on the database analysis - -## Running the Tests - -### Prerequisites - -- PostgreSQL server running and accessible -- Set the `SKIP_PARSER_TEST_POSTGRES` environment variable - -### Option 1: Using Docker Compose - -```bash -# Start PostgreSQL -docker compose up -d - -# Set environment variable -export SKIP_PARSER_TEST_POSTGRES="postgresql://postgres:mysecretpassword@localhost:5432/postgres" - -# Run the test -go test -tags=examples ./examples/skip_parser/postgresql -``` - -### Option 2: Using existing PostgreSQL - -```bash -# Set environment variable to your PostgreSQL instance -export SKIP_PARSER_TEST_POSTGRES="postgresql://user:pass@localhost:5432/dbname" - -# Run the test -go test -tags=examples ./examples/skip_parser/postgresql -``` - -### Generating Code - -```bash -# Make sure database is running and accessible -export SKIP_PARSER_TEST_POSTGRES="postgresql://postgres:mysecretpassword@localhost:5432/postgres" - -# Generate code -cd examples/skip_parser -sqlc generate -``` - -## Tests Included - -The `db_test.go` file includes comprehensive tests: - -### TestSkipParser -- Creates a product with arrays and JSON fields -- Tests all CRUD operations (Create, Read, Update, Delete) -- Tests list and search operations -- Tests counting - -### TestSkipParserComplexTypes -- Tests PostgreSQL-specific types (arrays, JSONB) -- Tests handling of nil/empty values -- Validates array and JSON handling - -## Features Tested - -This example tests the following PostgreSQL features with skip_parser: - -- **BIGSERIAL** - Auto-incrementing primary keys -- **NUMERIC** - Decimal types -- **TIMESTAMPTZ** - Timestamps with timezone -- **TEXT[]** - Text arrays -- **JSONB** - Binary JSON storage -- **ANY operator** - Array containment queries -- **GIN indexes** - Generalized inverted indexes for arrays -- **RETURNING clause** - Return values from INSERT/UPDATE - -All of these are validated directly by PostgreSQL without parser involvement! 
diff --git a/examples/skip_parser/postgresql/.gitignore b/examples/skip_parser/postgresql/.gitignore deleted file mode 100644 index f340015a6d..0000000000 --- a/examples/skip_parser/postgresql/.gitignore +++ /dev/null @@ -1,4 +0,0 @@ -# Generated files - run 'sqlc generate' to create them -db.go -models.go -query.sql.go diff --git a/examples/skip_parser/postgresql/db_test.go b/examples/skip_parser/postgresql/db_test.go deleted file mode 100644 index 427b037bc0..0000000000 --- a/examples/skip_parser/postgresql/db_test.go +++ /dev/null @@ -1,165 +0,0 @@ -//go:build examples - -package skipparser - -import ( - "context" - "testing" - - "github.com/jackc/pgx/v5" - "github.com/jackc/pgx/v5/pgtype" - - "github.com/sqlc-dev/sqlc/internal/sqltest/local" -) - -func TestSkipParser(t *testing.T) { - ctx := context.Background() - uri := local.PostgreSQL(t, []string{"schema.sql"}) - db, err := pgx.Connect(ctx, uri) - if err != nil { - t.Fatal(err) - } - defer db.Close(ctx) - - q := New(db) - - // Test CountProducts on empty database - count, err := q.CountProducts(ctx) - if err != nil { - t.Fatal(err) - } - if count != 0 { - t.Errorf("expected 0 products, got %d", count) - } - - // Test CreateProduct - product, err := q.CreateProduct(ctx, CreateProductParams{ - Name: "Test Product", - Price: "99.99", - Tags: []string{"electronics", "test"}, - Metadata: []byte(`{"color": "blue", "weight": 1.5}`), - }) - if err != nil { - t.Fatal(err) - } - if product.ID == 0 { - t.Error("expected product ID to be non-zero") - } - if product.Name != "Test Product" { - t.Errorf("expected name 'Test Product', got %s", product.Name) - } - t.Logf("Created product: %+v", product) - - // Test GetProduct - fetchedProduct, err := q.GetProduct(ctx, product.ID) - if err != nil { - t.Fatal(err) - } - if fetchedProduct.ID != product.ID { - t.Errorf("expected ID %d, got %d", product.ID, fetchedProduct.ID) - } - t.Logf("Fetched product: %+v", fetchedProduct) - - // Test ListProducts - products, err := q.ListProducts(ctx, ListProductsParams{ - Limit: 10, - Offset: 0, - }) - if err != nil { - t.Fatal(err) - } - if len(products) != 1 { - t.Errorf("expected 1 product, got %d", len(products)) - } - t.Logf("Listed products: %+v", products) - - // Test UpdateProduct - updatedProduct, err := q.UpdateProduct(ctx, UpdateProductParams{ - ID: product.ID, - Name: "Updated Product", - Price: "149.99", - Tags: []string{"electronics", "updated"}, - Metadata: []byte(`{"color": "red", "weight": 2.0}`), - }) - if err != nil { - t.Fatal(err) - } - if updatedProduct.Name != "Updated Product" { - t.Errorf("expected name 'Updated Product', got %s", updatedProduct.Name) - } - t.Logf("Updated product: %+v", updatedProduct) - - // Test SearchProductsByTag - tagProducts, err := q.SearchProductsByTag(ctx, "electronics") - if err != nil { - t.Fatal(err) - } - if len(tagProducts) != 1 { - t.Errorf("expected 1 product with tag 'electronics', got %d", len(tagProducts)) - } - t.Logf("Products with tag 'electronics': %+v", tagProducts) - - // Test CountProducts after insert - count, err = q.CountProducts(ctx) - if err != nil { - t.Fatal(err) - } - if count != 1 { - t.Errorf("expected 1 product, got %d", count) - } - - // Test DeleteProduct - err = q.DeleteProduct(ctx, product.ID) - if err != nil { - t.Fatal(err) - } - - // Verify deletion - count, err = q.CountProducts(ctx) - if err != nil { - t.Fatal(err) - } - if count != 0 { - t.Errorf("expected 0 products after deletion, got %d", count) - } -} - -func TestSkipParserComplexTypes(t *testing.T) { - ctx := 
context.Background() - uri := local.PostgreSQL(t, []string{"schema.sql"}) - db, err := pgx.Connect(ctx, uri) - if err != nil { - t.Fatal(err) - } - defer db.Close(ctx) - - q := New(db) - - // Test with empty arrays and JSON - product, err := q.CreateProduct(ctx, CreateProductParams{ - Name: "Minimal Product", - Price: "19.99", - Tags: []string{}, - Metadata: []byte(`{}`), - }) - if err != nil { - t.Fatal(err) - } - t.Logf("Created minimal product: %+v", product) - - // Test with nil values where allowed (using pgtype for nullable fields) - product2, err := q.CreateProduct(ctx, CreateProductParams{ - Name: "Another Product", - Price: "29.99", - Tags: nil, - Metadata: nil, - }) - if err != nil { - t.Fatal(err) - } - t.Logf("Created product with nil arrays: %+v", product2) - - // Cleanup - _ = q.DeleteProduct(ctx, product.ID) - _ = q.DeleteProduct(ctx, product2.ID) -} diff --git a/examples/skip_parser/postgresql/query.sql b/examples/skip_parser/postgresql/query.sql deleted file mode 100644 index ce24114d0d..0000000000 --- a/examples/skip_parser/postgresql/query.sql +++ /dev/null @@ -1,34 +0,0 @@ --- name: GetProduct :one -SELECT id, name, price, created_at, tags, metadata -FROM products -WHERE id = $1; - --- name: ListProducts :many -SELECT id, name, price, created_at, tags, metadata -FROM products -ORDER BY created_at DESC -LIMIT $1 OFFSET $2; - --- name: CreateProduct :one -INSERT INTO products (name, price, tags, metadata) -VALUES ($1, $2, $3, $4) -RETURNING id, name, price, created_at, tags, metadata; - --- name: UpdateProduct :one -UPDATE products -SET name = $2, price = $3, tags = $4, metadata = $5 -WHERE id = $1 -RETURNING id, name, price, created_at, tags, metadata; - --- name: DeleteProduct :exec -DELETE FROM products -WHERE id = $1; - --- name: SearchProductsByTag :many -SELECT id, name, price, created_at, tags, metadata -FROM products -WHERE $1 = ANY(tags) -ORDER BY created_at DESC; - --- name: CountProducts :one -SELECT COUNT(*) FROM products; diff --git a/examples/skip_parser/postgresql/schema.sql b/examples/skip_parser/postgresql/schema.sql deleted file mode 100644 index d14fa37395..0000000000 --- a/examples/skip_parser/postgresql/schema.sql +++ /dev/null @@ -1,11 +0,0 @@ -CREATE TABLE products ( - id BIGSERIAL PRIMARY KEY, - name TEXT NOT NULL, - price NUMERIC(10, 2) NOT NULL, - created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), - tags TEXT[] DEFAULT '{}', - metadata JSONB DEFAULT '{}' -); - -CREATE INDEX idx_products_created_at ON products(created_at); -CREATE INDEX idx_products_tags ON products USING GIN(tags); diff --git a/examples/skip_parser/sqlc.yaml b/examples/skip_parser/sqlc.yaml deleted file mode 100644 index d2d47817be..0000000000 --- a/examples/skip_parser/sqlc.yaml +++ /dev/null @@ -1,15 +0,0 @@ -version: '2' -sql: -- name: postgresql - schema: postgresql/schema.sql - queries: postgresql/query.sql - engine: postgresql - database: - uri: "${SKIP_PARSER_TEST_POSTGRES}" - analyzer: - skip_parser: true - gen: - go: - package: skipparser - sql_package: pgx/v5 - out: postgresql diff --git a/internal/compiler/skip_parser_test.go b/internal/compiler/skip_parser_test.go deleted file mode 100644 index 50a59376e6..0000000000 --- a/internal/compiler/skip_parser_test.go +++ /dev/null @@ -1,170 +0,0 @@ -package compiler - -import ( - "testing" - - "github.com/sqlc-dev/sqlc/internal/config" -) - -func TestSkipParserRequiresDatabase(t *testing.T) { - skipParser := true - conf := config.SQL{ - Engine: config.EnginePostgreSQL, - Analyzer: config.Analyzer{ - SkipParser: &skipParser, - }, 
- } - - combo := config.CombinedSettings{ - Package: conf, - } - - _, err := NewCompiler(conf, combo) - if err == nil { - t.Fatal("expected error when skip_parser is true without database config") - } - if err.Error() != "skip_parser requires database configuration" { - t.Errorf("unexpected error message: %s", err) - } -} - -func TestSkipParserRequiresDatabaseAnalyzer(t *testing.T) { - skipParser := true - analyzerDisabled := false - conf := config.SQL{ - Engine: config.EnginePostgreSQL, - Database: &config.Database{ - URI: "postgresql://localhost/test", - }, - Analyzer: config.Analyzer{ - SkipParser: &skipParser, - Database: &analyzerDisabled, - }, - } - - combo := config.CombinedSettings{ - Package: conf, - } - - _, err := NewCompiler(conf, combo) - if err == nil { - t.Fatal("expected error when skip_parser is true but database analyzer is disabled") - } - if err.Error() != "skip_parser requires database analyzer to be enabled" { - t.Errorf("unexpected error message: %s", err) - } -} - -func TestSkipParserOnlyPostgreSQL(t *testing.T) { - skipParser := true - engines := []config.Engine{ - config.EngineMySQL, - config.EngineSQLite, - } - - for _, engine := range engines { - conf := config.SQL{ - Engine: engine, - Database: &config.Database{ - URI: "test://localhost/test", - }, - Analyzer: config.Analyzer{ - SkipParser: &skipParser, - }, - } - - combo := config.CombinedSettings{ - Package: conf, - } - - _, err := NewCompiler(conf, combo) - if err == nil { - t.Fatalf("expected error for engine %s with skip_parser", engine) - } - if err.Error() != "skip_parser is only supported for PostgreSQL" { - t.Errorf("unexpected error message for %s: %s", engine, err) - } - } -} - -func TestSkipParserValidConfig(t *testing.T) { - skipParser := true - conf := config.SQL{ - Engine: config.EnginePostgreSQL, - Database: &config.Database{ - URI: "postgresql://localhost/test", - }, - Analyzer: config.Analyzer{ - SkipParser: &skipParser, - }, - } - - combo := config.CombinedSettings{ - Package: conf, - } - - c, err := NewCompiler(conf, combo) - if err != nil { - t.Fatalf("unexpected error with valid skip_parser config: %s", err) - } - - // Verify parser and catalog are nil when skip_parser is true - if c.parser != nil { - t.Error("expected parser to be nil when skip_parser is true") - } - if c.catalog != nil { - t.Error("expected catalog to be nil when skip_parser is true") - } - // Analyzer should still be set (but we can't check it without a real DB connection) -} - -func TestSkipParserDisabledNormalOperation(t *testing.T) { - skipParser := false - conf := config.SQL{ - Engine: config.EnginePostgreSQL, - Analyzer: config.Analyzer{ - SkipParser: &skipParser, - }, - } - - combo := config.CombinedSettings{ - Package: conf, - } - - c, err := NewCompiler(conf, combo) - if err != nil { - t.Fatalf("unexpected error with skip_parser=false: %s", err) - } - - // Verify parser and catalog ARE set when skip_parser is false - if c.parser == nil { - t.Error("expected parser to be set when skip_parser is false") - } - if c.catalog == nil { - t.Error("expected catalog to be set when skip_parser is false") - } -} - -func TestSkipParserDefaultNormalOperation(t *testing.T) { - // When skip_parser is not specified (nil), should work normally - conf := config.SQL{ - Engine: config.EnginePostgreSQL, - } - - combo := config.CombinedSettings{ - Package: conf, - } - - c, err := NewCompiler(conf, combo) - if err != nil { - t.Fatalf("unexpected error with default config: %s", err) - } - - // Verify parser and catalog ARE set by 
default - if c.parser == nil { - t.Error("expected parser to be set by default") - } - if c.catalog == nil { - t.Error("expected catalog to be set by default") - } -} diff --git a/internal/compiler/split_test.go b/internal/compiler/split_test.go deleted file mode 100644 index 87b3003eaf..0000000000 --- a/internal/compiler/split_test.go +++ /dev/null @@ -1,176 +0,0 @@ -package compiler - -import ( - "context" - "strings" - "testing" - - "github.com/sqlc-dev/sqlc/internal/sql/sqlfile" -) - -func TestSqlfileSplitForSkipParser(t *testing.T) { - input := ` --- name: GetUser :one -SELECT id, name FROM users WHERE id = $1; - --- name: ListUsers :many -SELECT id, name FROM users ORDER BY id; - --- name: CreateUser :one -INSERT INTO users (name) VALUES ($1) RETURNING id, name; -` - - ctx := context.Background() - queries, err := sqlfile.Split(ctx, strings.NewReader(input)) - if err != nil { - t.Fatalf("failed to split queries: %s", err) - } - - if len(queries) != 3 { - t.Fatalf("expected 3 queries, got %d", len(queries)) - } - - // Check first query - if !strings.Contains(queries[0], "GetUser") { - t.Errorf("first query should contain GetUser, got: %s", queries[0]) - } - if !strings.Contains(queries[0], "WHERE id = $1") { - t.Errorf("first query should contain WHERE clause, got: %s", queries[0]) - } - - // Check second query - if !strings.Contains(queries[1], "ListUsers") { - t.Errorf("second query should contain ListUsers, got: %s", queries[1]) - } - if !strings.Contains(queries[1], "ORDER BY id") { - t.Errorf("second query should contain ORDER BY, got: %s", queries[1]) - } - - // Check third query - if !strings.Contains(queries[2], "CreateUser") { - t.Errorf("third query should contain CreateUser, got: %s", queries[2]) - } - if !strings.Contains(queries[2], "RETURNING") { - t.Errorf("third query should contain RETURNING, got: %s", queries[2]) - } -} - -func TestSqlfileSplitWithComplexQueries(t *testing.T) { - input := ` --- name: ComplexQuery :many -SELECT - id, - name, - CASE - WHEN price > 100 THEN 'expensive' - ELSE 'affordable' - END as category, - tags -FROM products -WHERE - name LIKE '%' || $1 || '%' - AND $2 = ANY(tags) -ORDER BY created_at DESC -LIMIT $3; - --- name: UpdateWithJSON :one -UPDATE products -SET metadata = $2::jsonb -WHERE id = $1 -RETURNING id, metadata; -` - - ctx := context.Background() - queries, err := sqlfile.Split(ctx, strings.NewReader(input)) - if err != nil { - t.Fatalf("failed to split complex queries: %s", err) - } - - if len(queries) != 2 { - t.Fatalf("expected 2 queries, got %d", len(queries)) - } - - // Check first complex query - if !strings.Contains(queries[0], "CASE") { - t.Errorf("first query should contain CASE statement, got: %s", queries[0]) - } - if !strings.Contains(queries[0], "ANY(tags)") { - t.Errorf("first query should contain ANY operator, got: %s", queries[0]) - } - - // Check second query with JSON - if !strings.Contains(queries[1], "::jsonb") { - t.Errorf("second query should contain jsonb cast, got: %s", queries[1]) - } - if !strings.Contains(queries[1], "RETURNING") { - t.Errorf("second query should contain RETURNING, got: %s", queries[1]) - } -} - -func TestSqlfileSplitWithDollarQuotes(t *testing.T) { - input := ` --- name: CreateFunction :exec -CREATE OR REPLACE FUNCTION calculate_total(p_price NUMERIC, p_quantity INTEGER) -RETURNS NUMERIC AS $$ -BEGIN - RETURN p_price * p_quantity; -END; -$$ LANGUAGE plpgsql; - --- name: GetValue :one -SELECT $$This is a dollar quoted string$$ as value; -` - - ctx := context.Background() - queries, err := 
sqlfile.Split(ctx, strings.NewReader(input)) - if err != nil { - t.Fatalf("failed to split queries with dollar quotes: %s", err) - } - - if len(queries) != 2 { - t.Fatalf("expected 2 queries, got %d", len(queries)) - } - - // Check function creation with dollar quotes - if !strings.Contains(queries[0], "$$") { - t.Errorf("first query should contain dollar quotes, got: %s", queries[0]) - } - if !strings.Contains(queries[0], "plpgsql") { - t.Errorf("first query should contain plpgsql, got: %s", queries[0]) - } - - // Check dollar quoted string - if !strings.Contains(queries[1], "dollar quoted string") { - t.Errorf("second query should contain dollar quoted string, got: %s", queries[1]) - } -} - -func TestSqlfileSplitEmptyAndComments(t *testing.T) { - input := ` --- name: OnlyQuery :one -SELECT 1; -` - - ctx := context.Background() - queries, err := sqlfile.Split(ctx, strings.NewReader(input)) - if err != nil { - t.Fatalf("failed to split queries: %s", err) - } - - // Should get at least one query - if len(queries) < 1 { - t.Fatalf("expected at least 1 query, got %d", len(queries)) - } - - // Find the query with our name - found := false - for _, q := range queries { - if strings.Contains(q, "OnlyQuery") { - found = true - break - } - } - if !found { - t.Errorf("should find query with OnlyQuery") - } -} diff --git a/internal/config/skip_parser_test.go b/internal/config/skip_parser_test.go deleted file mode 100644 index 1da78cef38..0000000000 --- a/internal/config/skip_parser_test.go +++ /dev/null @@ -1,105 +0,0 @@ -package config - -import ( - "strings" - "testing" -) - -func TestSkipParserConfig(t *testing.T) { - yaml := ` -version: "2" -sql: - - name: "test" - engine: "postgresql" - queries: "query.sql" - schema: "schema.sql" - database: - uri: "postgresql://localhost/test" - analyzer: - skip_parser: true - gen: - go: - package: "test" - out: "test" -` - - conf, err := ParseConfig(strings.NewReader(yaml)) - if err != nil { - t.Fatalf("failed to parse config: %s", err) - } - - if len(conf.SQL) != 1 { - t.Fatalf("expected 1 SQL config, got %d", len(conf.SQL)) - } - - sql := conf.SQL[0] - if sql.Analyzer.SkipParser == nil { - t.Fatal("expected skip_parser to be set") - } - if !*sql.Analyzer.SkipParser { - t.Error("expected skip_parser to be true") - } -} - -func TestSkipParserConfigDefault(t *testing.T) { - yaml := ` -version: "2" -sql: - - name: "test" - engine: "postgresql" - queries: "query.sql" - schema: "schema.sql" - gen: - go: - package: "test" - out: "test" -` - - conf, err := ParseConfig(strings.NewReader(yaml)) - if err != nil { - t.Fatalf("failed to parse config: %s", err) - } - - if len(conf.SQL) != 1 { - t.Fatalf("expected 1 SQL config, got %d", len(conf.SQL)) - } - - sql := conf.SQL[0] - if sql.Analyzer.SkipParser != nil { - t.Errorf("expected skip_parser to be nil (default), got %v", *sql.Analyzer.SkipParser) - } -} - -func TestSkipParserConfigFalse(t *testing.T) { - yaml := ` -version: "2" -sql: - - name: "test" - engine: "postgresql" - queries: "query.sql" - schema: "schema.sql" - analyzer: - skip_parser: false - gen: - go: - package: "test" - out: "test" -` - - conf, err := ParseConfig(strings.NewReader(yaml)) - if err != nil { - t.Fatalf("failed to parse config: %s", err) - } - - if len(conf.SQL) != 1 { - t.Fatalf("expected 1 SQL config, got %d", len(conf.SQL)) - } - - sql := conf.SQL[0] - if sql.Analyzer.SkipParser == nil { - t.Fatal("expected skip_parser to be set") - } - if *sql.Analyzer.SkipParser { - t.Error("expected skip_parser to be false") - } -} diff --git 
new file mode 100644
index 0000000000..ee1b7ecd9e
--- /dev/null
+++ b/internal/endtoend/testdata/skip_parser/postgresql/pgx/v5/exec.json
@@ -0,0 +1,3 @@
+{
+  "contexts": ["managed-db"]
+}
diff --git a/internal/endtoend/testdata/skip_parser/postgresql/pgx/v5/go/db.go b/internal/endtoend/testdata/skip_parser/postgresql/pgx/v5/go/db.go
new file mode 100644
index 0000000000..1e00549714
--- /dev/null
+++ b/internal/endtoend/testdata/skip_parser/postgresql/pgx/v5/go/db.go
@@ -0,0 +1,32 @@
+// Code generated by sqlc. DO NOT EDIT.
+// versions:
+//   sqlc v1.30.0
+
+package querytest
+
+import (
+	"context"
+
+	"github.com/jackc/pgx/v5"
+	"github.com/jackc/pgx/v5/pgconn"
+)
+
+type DBTX interface {
+	Exec(context.Context, string, ...interface{}) (pgconn.CommandTag, error)
+	Query(context.Context, string, ...interface{}) (pgx.Rows, error)
+	QueryRow(context.Context, string, ...interface{}) pgx.Row
+}
+
+func New(db DBTX) *Queries {
+	return &Queries{db: db}
+}
+
+type Queries struct {
+	db DBTX
+}
+
+func (q *Queries) WithTx(tx pgx.Tx) *Queries {
+	return &Queries{
+		db: tx,
+	}
+}
diff --git a/internal/endtoend/testdata/skip_parser/postgresql/pgx/v5/go/models.go b/internal/endtoend/testdata/skip_parser/postgresql/pgx/v5/go/models.go
new file mode 100644
index 0000000000..f64b1f4161
--- /dev/null
+++ b/internal/endtoend/testdata/skip_parser/postgresql/pgx/v5/go/models.go
@@ -0,0 +1,16 @@
+// Code generated by sqlc. DO NOT EDIT.
+// versions:
+//   sqlc v1.30.0
+
+package querytest
+
+import (
+	"github.com/jackc/pgx/v5/pgtype"
+)
+
+type Product struct {
+	ID    int64
+	Name  string
+	Price pgtype.Numeric
+	Tags  []string
+}
diff --git a/internal/endtoend/testdata/skip_parser/postgresql/pgx/v5/go/query.sql.go b/internal/endtoend/testdata/skip_parser/postgresql/pgx/v5/go/query.sql.go
new file mode 100644
index 0000000000..e91e1de4cd
--- /dev/null
+++ b/internal/endtoend/testdata/skip_parser/postgresql/pgx/v5/go/query.sql.go
@@ -0,0 +1,85 @@
+// Code generated by sqlc. DO NOT EDIT.
+// versions:
+//   sqlc v1.30.0
+// source: query.sql
+
+package querytest
+
+import (
+	"context"
+
+	"github.com/jackc/pgx/v5/pgtype"
+)
+
+const createProduct = `-- name: CreateProduct :one
+INSERT INTO products (name, price, tags)
+VALUES ($1, $2, $3)
+RETURNING id, name, price, tags
+`
+
+type CreateProductParams struct {
+	Name  string
+	Price pgtype.Numeric
+	Tags  []string
+}
+
+func (q *Queries) CreateProduct(ctx context.Context, arg CreateProductParams) (Product, error) {
+	row := q.db.QueryRow(ctx, createProduct, arg.Name, arg.Price, arg.Tags)
+	var i Product
+	err := row.Scan(
+		&i.ID,
+		&i.Name,
+		&i.Price,
+		&i.Tags,
+	)
+	return i, err
+}
+
+const getProduct = `-- name: GetProduct :one
+SELECT id, name, price, tags
+FROM products
+WHERE id = $1
+`
+
+func (q *Queries) GetProduct(ctx context.Context, id int64) (Product, error) {
+	row := q.db.QueryRow(ctx, getProduct, id)
+	var i Product
+	err := row.Scan(
+		&i.ID,
+		&i.Name,
+		&i.Price,
+		&i.Tags,
+	)
+	return i, err
+}
+
+const listProducts = `-- name: ListProducts :many
+SELECT id, name, price, tags
+FROM products
+ORDER BY id
+`
+
+func (q *Queries) ListProducts(ctx context.Context) ([]Product, error) {
+	rows, err := q.db.Query(ctx, listProducts)
+	if err != nil {
+		return nil, err
+	}
+	defer rows.Close()
+	var items []Product
+	for rows.Next() {
+		var i Product
+		if err := rows.Scan(
+			&i.ID,
+			&i.Name,
+			&i.Price,
+			&i.Tags,
+		); err != nil {
+			return nil, err
+		}
+		items = append(items, i)
+	}
+	if err := rows.Err(); err != nil {
+		return nil, err
+	}
+	return items, nil
+}
diff --git a/internal/endtoend/testdata/skip_parser/postgresql/pgx/v5/query.sql b/internal/endtoend/testdata/skip_parser/postgresql/pgx/v5/query.sql
new file mode 100644
index 0000000000..24da729235
--- /dev/null
+++ b/internal/endtoend/testdata/skip_parser/postgresql/pgx/v5/query.sql
@@ -0,0 +1,14 @@
+-- name: GetProduct :one
+SELECT id, name, price, tags
+FROM products
+WHERE id = $1;
+
+-- name: ListProducts :many
+SELECT id, name, price, tags
+FROM products
+ORDER BY id;
+
+-- name: CreateProduct :one
+INSERT INTO products (name, price, tags)
+VALUES ($1, $2, $3)
+RETURNING id, name, price, tags;
diff --git a/internal/endtoend/testdata/skip_parser/postgresql/pgx/v5/schema.sql b/internal/endtoend/testdata/skip_parser/postgresql/pgx/v5/schema.sql
new file mode 100644
index 0000000000..2b9fc7e5c1
--- /dev/null
+++ b/internal/endtoend/testdata/skip_parser/postgresql/pgx/v5/schema.sql
@@ -0,0 +1,6 @@
+CREATE TABLE products (
+    id BIGSERIAL PRIMARY KEY,
+    name TEXT NOT NULL,
+    price NUMERIC(10, 2) NOT NULL,
+    tags TEXT[]
+);
diff --git a/internal/endtoend/testdata/skip_parser/postgresql/pgx/v5/sqlc.json b/internal/endtoend/testdata/skip_parser/postgresql/pgx/v5/sqlc.json
new file mode 100644
index 0000000000..445916f44f
--- /dev/null
+++ b/internal/endtoend/testdata/skip_parser/postgresql/pgx/v5/sqlc.json
@@ -0,0 +1,24 @@
+{
+  "version": "2",
+  "sql": [
+    {
+      "name": "querytest",
+      "engine": "postgresql",
+      "queries": "query.sql",
+      "schema": "schema.sql",
+      "database": {
+        "managed": true
+      },
+      "analyzer": {
+        "skip_parser": true
+      },
+      "gen": {
+        "go": {
+          "package": "querytest",
+          "sql_package": "pgx/v5",
+          "out": "go"
+        }
+      }
+    }
+  ]
+}
diff --git a/internal/endtoend/testdata/skip_parser_error_no_database/postgresql/query.sql b/internal/endtoend/testdata/skip_parser_error_no_database/postgresql/query.sql
new file mode 100644
index 0000000000..7f38820027
--- /dev/null
+++ b/internal/endtoend/testdata/skip_parser_error_no_database/postgresql/query.sql
@@ -0,0 +1,2 @@
+-- name: GetTest :one
+SELECT id FROM test WHERE id = $1;
diff --git a/internal/endtoend/testdata/skip_parser_error_no_database/postgresql/schema.sql b/internal/endtoend/testdata/skip_parser_error_no_database/postgresql/schema.sql
new file mode 100644
index 0000000000..48038dc8e8
--- /dev/null
+++ b/internal/endtoend/testdata/skip_parser_error_no_database/postgresql/schema.sql
@@ -0,0 +1 @@
+CREATE TABLE test (id INT);
diff --git a/internal/endtoend/testdata/skip_parser_error_no_database/postgresql/sqlc.json b/internal/endtoend/testdata/skip_parser_error_no_database/postgresql/sqlc.json
new file mode 100644
index 0000000000..82d1077270
--- /dev/null
+++ b/internal/endtoend/testdata/skip_parser_error_no_database/postgresql/sqlc.json
@@ -0,0 +1,20 @@
+{
+  "version": "2",
+  "sql": [
+    {
+      "name": "test",
+      "engine": "postgresql",
+      "queries": "query.sql",
+      "schema": "schema.sql",
+      "analyzer": {
+        "skip_parser": true
+      },
+      "gen": {
+        "go": {
+          "package": "test",
+          "out": "go"
+        }
+      }
+    }
+  ]
+}
diff --git a/internal/endtoend/testdata/skip_parser_error_no_database/postgresql/stderr.txt b/internal/endtoend/testdata/skip_parser_error_no_database/postgresql/stderr.txt
new file mode 100644
index 0000000000..594ae8755b
--- /dev/null
+++ b/internal/endtoend/testdata/skip_parser_error_no_database/postgresql/stderr.txt
@@ -0,0 +1 @@
+error creating compiler: skip_parser requires database configuration

From a971343cd1fe046360e2dee08083263eaf5e4594 Mon Sep 17 00:00:00 2001
From: Claude
Date: Sat, 25 Oct 2025 20:05:59 +0000
Subject: [PATCH 4/4] Fix nil pointer dereference in skip_parser with managed database
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

The skip_parser feature was failing with a nil pointer dereference when used
with managed databases because parseCatalog() was trying to use the parser and
catalog, which are nil when skip_parser is enabled.

## Root Cause

When skip_parser is enabled:
1. Parser and catalog are set to nil in NewCompiler
2. parseCatalog() was unconditionally calling c.parser.Parse() (line 46)
3. This caused a nil pointer dereference

However, parseCatalog() still needs to be called even in skip_parser mode
because:
- The schema SQL text needs to be stored in c.schema
- The database analyzer needs c.schema to pass to PostgreSQL

## Fix

Modified parseCatalog() to check if skip_parser is enabled:
- If skip_parser: Read schema files and store in c.schema, skip parsing
- If normal mode: Parse schemas and update catalog as before

Also reverted the change in generate.go that was skipping ParseCatalog
entirely, since we always need to call it (it now handles skip_parser
internally).
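
For reference, the managed-db end-to-end case added earlier in this series is
the configuration that exercises this code path. Its sqlc.json, shown here as
the equivalent YAML for readability, combines a managed database with
skip_parser:

```yaml
version: "2"
sql:
  - name: "querytest"
    engine: "postgresql"
    queries: "query.sql"
    schema: "schema.sql"
    database:
      managed: true
    analyzer:
      skip_parser: true
    gen:
      go:
        package: "querytest"
        sql_package: "pgx/v5"
        out: "go"
```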

## Testing

This fixes the panic in the managed-db context test:
- TestReplay/managed-db/skip_parser/postgresql/pgx/v5

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude
---
 internal/cmd/generate.go     | 30 +++++++++++-------------------
 internal/compiler/compile.go | 18 ++++++++++++++++++
 2 files changed, 29 insertions(+), 19 deletions(-)

diff --git a/internal/cmd/generate.go b/internal/cmd/generate.go
index 34ac4d4965..00e8871c7e 100644
--- a/internal/cmd/generate.go
+++ b/internal/cmd/generate.go
@@ -305,28 +305,20 @@ func parse(ctx context.Context, name, dir string, sql config.SQL, combo config.C
 		fmt.Fprintf(stderr, "error creating compiler: %s\n", err)
 		return nil, true
 	}
-
-	// Check if skip_parser is enabled
-	skipParser := sql.Analyzer.SkipParser != nil && *sql.Analyzer.SkipParser
-
-	// Skip catalog parsing if skip_parser is enabled
-	if !skipParser {
-		if err := c.ParseCatalog(sql.Schema); err != nil {
-			fmt.Fprintf(stderr, "# package %s\n", name)
-			if parserErr, ok := err.(*multierr.Error); ok {
-				for _, fileErr := range parserErr.Errs() {
-					printFileErr(stderr, dir, fileErr)
-				}
-			} else {
-				fmt.Fprintf(stderr, "error parsing schema: %s\n", err)
+	if err := c.ParseCatalog(sql.Schema); err != nil {
+		fmt.Fprintf(stderr, "# package %s\n", name)
+		if parserErr, ok := err.(*multierr.Error); ok {
+			for _, fileErr := range parserErr.Errs() {
+				printFileErr(stderr, dir, fileErr)
 			}
-			return nil, true
-		}
-		if parserOpts.Debug.DumpCatalog {
-			debug.Dump(c.Catalog())
+		} else {
+			fmt.Fprintf(stderr, "error parsing schema: %s\n", err)
 		}
+		return nil, true
+	}
+	if parserOpts.Debug.DumpCatalog {
+		debug.Dump(c.Catalog())
 	}
-
 	if err := c.ParseQueries(sql.Queries, parserOpts); err != nil {
 		fmt.Fprintf(stderr, "# package %s\n", name)
 		if parserErr, ok := err.(*multierr.Error); ok {
diff --git a/internal/compiler/compile.go b/internal/compiler/compile.go
index 43ec3c04f9..36e8b9a652 100644
--- a/internal/compiler/compile.go
+++ b/internal/compiler/compile.go
@@ -34,6 +34,24 @@ func (c *Compiler) parseCatalog(schemas []string) error {
 	if err != nil {
 		return err
 	}
+
+	// Check if we're in skip_parser mode
+	skipParser := c.conf.Analyzer.SkipParser != nil && *c.conf.Analyzer.SkipParser
+
+	// If skip_parser is enabled, just read schema files without parsing
+	if skipParser {
+		for _, filename := range files {
+			blob, err := os.ReadFile(filename)
+			if err != nil {
+				return fmt.Errorf("reading schema file %s: %w", filename, err)
+			}
+			contents := migrations.RemoveRollbackStatements(string(blob))
+			c.schema = append(c.schema, contents)
+		}
+		return nil
+	}
+
+	// Normal path: parse and update catalog
 	merr := multierr.New()
 	for _, filename := range files {
 		blob, err := os.ReadFile(filename)