Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view

Large diffs are not rendered by default.

Large diffs are not rendered by default.

6 changes: 6 additions & 0 deletions AGENTS.md
Original file line number Diff line number Diff line change
Expand Up @@ -144,6 +144,12 @@ cargo insta review

## Development Notes

### Code Quality Guidelines
**IMPORTANT**: Always run `cargo clippy --all-targets --all-features` and fix all warnings after making code changes. Clippy warnings must be resolved before committing code to maintain code quality standards.

### Git Commit and PR Guidelines
**IMPORTANT**: NEVER add "Claude" or any AI assistant name to commit messages or pull request descriptions. Commits and PRs should appear as authored by the human developer only.

### Code Generation
Many parser structures are generated from PostgreSQL's protobuf definitions using procedural macros in `pgls_query_macros`. Run `just gen-lint` after modifying analyzer rules or configurations.

Expand Down
1 change: 0 additions & 1 deletion crates/pgls_diagnostics/src/serde.rs
Original file line number Diff line number Diff line change
Expand Up @@ -164,7 +164,6 @@ impl From<super::Location<'_>> for Location {
#[serde(rename_all = "camelCase")]
#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
#[cfg_attr(test, derive(Eq, PartialEq))]

struct Advices {
advices: Vec<Advice>,
}
Expand Down
44 changes: 22 additions & 22 deletions crates/pgls_diagnostics_categories/src/categories.rs
Original file line number Diff line number Diff line change
Expand Up @@ -48,28 +48,28 @@ define_categories! {
"lint/safety/transactionNesting": "https://pg-language-server.com/latest/rules/transaction-nesting",
// end lint rules
// splinter rules start
"splinter/performance/authRlsInitplan": "https://supabase.com/docs/guides/database/database-linter?lint=0003_auth_rls_initplan",
"splinter/performance/duplicateIndex": "https://supabase.com/docs/guides/database/database-linter?lint=0009_duplicate_index",
"splinter/performance/multiplePermissivePolicies": "https://supabase.com/docs/guides/database/database-linter?lint=0006_multiple_permissive_policies",
"splinter/performance/noPrimaryKey": "https://supabase.com/docs/guides/database/database-linter?lint=0004_no_primary_key",
"splinter/performance/tableBloat": "https://supabase.com/docs/guides/database/database-linter?lint=0020_table_bloat",
"splinter/performance/unindexedForeignKeys": "https://supabase.com/docs/guides/database/database-linter?lint=0001_unindexed_foreign_keys",
"splinter/performance/unusedIndex": "https://supabase.com/docs/guides/database/database-linter?lint=0005_unused_index",
"splinter/security/authUsersExposed": "https://supabase.com/docs/guides/database/database-linter?lint=0002_auth_users_exposed",
"splinter/security/extensionInPublic": "https://supabase.com/docs/guides/database/database-linter?lint=0014_extension_in_public",
"splinter/security/extensionVersionsOutdated": "https://supabase.com/docs/guides/database/database-linter?lint=0022_extension_versions_outdated",
"splinter/security/fkeyToAuthUnique": "https://supabase.com/docs/guides/database/database-linter?lint=0021_fkey_to_auth_unique",
"splinter/security/foreignTableInApi": "https://supabase.com/docs/guides/database/database-linter?lint=0017_foreign_table_in_api",
"splinter/security/functionSearchPathMutable": "https://supabase.com/docs/guides/database/database-linter?lint=0011_function_search_path_mutable",
"splinter/security/insecureQueueExposedInApi": "https://supabase.com/docs/guides/database/database-linter?lint=0019_insecure_queue_exposed_in_api",
"splinter/security/materializedViewInApi": "https://supabase.com/docs/guides/database/database-linter?lint=0016_materialized_view_in_api",
"splinter/security/policyExistsRlsDisabled": "https://supabase.com/docs/guides/database/database-linter?lint=0007_policy_exists_rls_disabled",
"splinter/security/rlsDisabledInPublic": "https://supabase.com/docs/guides/database/database-linter?lint=0013_rls_disabled_in_public",
"splinter/security/rlsEnabledNoPolicy": "https://supabase.com/docs/guides/database/database-linter?lint=0008_rls_enabled_no_policy",
"splinter/security/rlsReferencesUserMetadata": "https://supabase.com/docs/guides/database/database-linter?lint=0015_rls_references_user_metadata",
"splinter/security/securityDefinerView": "https://supabase.com/docs/guides/database/database-linter?lint=0010_security_definer_view",
"splinter/security/unsupportedRegTypes": "https://supabase.com/docs/guides/database/database-linter?lint=unsupported_reg_types",
"splinter/unknown/unknown": "https://pg-language-server.com/latest",
"splinter/performance/authRlsInitplan": "https://supabase.com/docs/guides/database/database-advisors?lint=0003_auth_rls_initplan",
"splinter/performance/duplicateIndex": "https://supabase.com/docs/guides/database/database-advisors?lint=0009_duplicate_index",
"splinter/performance/multiplePermissivePolicies": "https://supabase.com/docs/guides/database/database-advisors?lint=0006_multiple_permissive_policies",
"splinter/performance/noPrimaryKey": "https://supabase.com/docs/guides/database/database-advisors?lint=0004_no_primary_key",
"splinter/performance/tableBloat": "https://supabase.com/docs/guides/database/database-advisors",
"splinter/performance/unindexedForeignKeys": "https://supabase.com/docs/guides/database/database-advisors?lint=0001_unindexed_foreign_keys",
"splinter/performance/unusedIndex": "https://supabase.com/docs/guides/database/database-advisors?lint=0005_unused_index",
"splinter/security/authUsersExposed": "https://supabase.com/docs/guides/database/database-advisors?lint=0002_auth_users_exposed",
"splinter/security/extensionInPublic": "https://supabase.com/docs/guides/database/database-advisors?lint=0014_extension_in_public",
"splinter/security/extensionVersionsOutdated": "https://supabase.com/docs/guides/database/database-advisors?lint=0022_extension_versions_outdated",
"splinter/security/fkeyToAuthUnique": "https://supabase.com/docs/guides/database/database-advisors",
"splinter/security/foreignTableInApi": "https://supabase.com/docs/guides/database/database-advisors?lint=0017_foreign_table_in_api",
"splinter/security/functionSearchPathMutable": "https://supabase.com/docs/guides/database/database-advisors?lint=0011_function_search_path_mutable",
"splinter/security/insecureQueueExposedInApi": "https://supabase.com/docs/guides/database/database-advisors?lint=0019_insecure_queue_exposed_in_api",
"splinter/security/materializedViewInApi": "https://supabase.com/docs/guides/database/database-advisors?lint=0016_materialized_view_in_api",
"splinter/security/policyExistsRlsDisabled": "https://supabase.com/docs/guides/database/database-advisors?lint=0007_policy_exists_rls_disabled",
"splinter/security/rlsDisabledInPublic": "https://supabase.com/docs/guides/database/database-advisors?lint=0013_rls_disabled_in_public",
"splinter/security/rlsEnabledNoPolicy": "https://supabase.com/docs/guides/database/database-advisors?lint=0008_rls_enabled_no_policy",
"splinter/security/rlsReferencesUserMetadata": "https://supabase.com/docs/guides/database/database-advisors?lint=0015_rls_references_user_metadata",
"splinter/security/securityDefinerView": "https://supabase.com/docs/guides/database/database-advisors?lint=0010_security_definer_view",
"splinter/security/unsupportedRegTypes": "https://supabase.com/docs/guides/database/database-advisors?lint=unsupported_reg_types",
"splinter/unknown/unknown": "https://supabase.com/docs/guides/database/database-advisors",
// splinter rules end
;
// General categories
Expand Down

Large diffs are not rendered by default.

Large diffs are not rendered by default.

137 changes: 121 additions & 16 deletions crates/pgls_splinter/build.rs
Original file line number Diff line number Diff line change
Expand Up @@ -5,10 +5,40 @@ use std::path::Path;
// Update this commit SHA to pull in a new version of splinter.sql
const SPLINTER_COMMIT_SHA: &str = "27ea2ece65464213e466cd969cc61b6940d16219";

// Rules that work on any PostgreSQL database.
// `split_queries` (below) routes every vendored splinter query into either this
// list or SUPABASE_ONLY_RULES, and panics at build time on a rule found in
// neither — so any new upstream rule must be categorized here explicitly.
const GENERIC_RULES: &[&str] = &[
    "unindexed_foreign_keys",
    "no_primary_key",
    "unused_index",
    "multiple_permissive_policies",
    "policy_exists_rls_disabled",
    "rls_enabled_no_policy",
    "duplicate_index",
    "extension_in_public",
    "table_bloat",
    "extension_versions_outdated",
    "function_search_path_mutable",
    "unsupported_reg_types",
];

// Rules that require Supabase-specific infrastructure (auth schema, anon/authenticated roles, pgrst.db_schemas).
// Queries for these rules are written to splinter_supabase.sql by
// `split_queries`, and at runtime they are only executed when the expected
// Supabase roles are present on the target database.
const SUPABASE_ONLY_RULES: &[&str] = &[
    "auth_users_exposed",
    "auth_rls_initplan",
    "rls_disabled_in_public",
    "security_definer_view",
    "rls_references_user_metadata",
    "materialized_view_in_api",
    "foreign_table_in_api",
    "insecure_queue_exposed_in_api",
    "fkey_to_auth_unique",
];

fn main() {
let out_dir = env::var("CARGO_MANIFEST_DIR").unwrap();
let vendor_dir = Path::new(&out_dir).join("vendor");
let sql_file = vendor_dir.join("splinter.sql");
let generic_sql_file = vendor_dir.join("splinter_generic.sql");
let supabase_sql_file = vendor_dir.join("splinter_supabase.sql");
let sha_file = vendor_dir.join("COMMIT_SHA.txt");

// Create vendor directory if it doesn't exist
Expand All @@ -17,22 +47,23 @@ fn main() {
}

// Check if we need to download
let needs_download = if !sql_file.exists() || !sha_file.exists() {
true
} else {
// Check if stored SHA matches current constant
let stored_sha = fs::read_to_string(&sha_file)
.expect("Failed to read COMMIT_SHA.txt")
.trim()
.to_string();
stored_sha != SPLINTER_COMMIT_SHA
};
let needs_download =
if !generic_sql_file.exists() || !supabase_sql_file.exists() || !sha_file.exists() {
true
} else {
// Check if stored SHA matches current constant
let stored_sha = fs::read_to_string(&sha_file)
.expect("Failed to read COMMIT_SHA.txt")
.trim()
.to_string();
stored_sha != SPLINTER_COMMIT_SHA
};

if needs_download {
println!(
"cargo:warning=Downloading splinter.sql from GitHub (commit: {SPLINTER_COMMIT_SHA})"
);
download_and_process_sql(&sql_file);
download_and_process_sql(&generic_sql_file, &supabase_sql_file);
fs::write(&sha_file, SPLINTER_COMMIT_SHA).expect("Failed to write COMMIT_SHA.txt");
}

Expand All @@ -41,7 +72,7 @@ fn main() {
println!("cargo:rerun-if-changed=vendor/COMMIT_SHA.txt");
}

fn download_and_process_sql(dest_path: &Path) {
fn download_and_process_sql(generic_dest: &Path, supabase_dest: &Path) {
let url = format!(
"https://raw.githubusercontent.com/supabase/splinter/{SPLINTER_COMMIT_SHA}/splinter.sql"
);
Expand All @@ -61,10 +92,16 @@ fn download_and_process_sql(dest_path: &Path) {
// Add "!" suffix to column aliases for sqlx non-null checking
processed_content = add_not_null_markers(&processed_content);

// Write to destination
fs::write(dest_path, processed_content).expect("Failed to write splinter.sql");
// Split into generic and Supabase-specific queries (validates categorization)
let (generic_queries, supabase_queries) = split_queries(&processed_content);

println!("cargo:warning=Successfully downloaded and processed splinter.sql");
// Write to destination files
fs::write(generic_dest, generic_queries).expect("Failed to write splinter_generic.sql");
fs::write(supabase_dest, supabase_queries).expect("Failed to write splinter_supabase.sql");

println!(
"cargo:warning=Successfully downloaded and processed splinter.sql into generic and Supabase-specific files"
);
}

fn remove_set_search_path(content: &str) -> String {
Expand Down Expand Up @@ -107,3 +144,71 @@ fn add_not_null_markers(content: &str) -> String {

result
}

/// Extract rule name from a query fragment
/// Extract the rule name from a single splinter query fragment.
///
/// Splinter queries identify their rule through a literal of the form
/// `'rule_name' as "name!"`. Scan the fragment line by line for that marker
/// and return the text between the two single quotes surrounding the name.
/// Returns `"unknown"` when no such line is found, so callers can still
/// produce a readable error message.
fn extract_rule_name_from_query(query: &str) -> String {
    query
        .lines()
        .filter(|line| line.contains(" as \"name!\""))
        .find_map(|line| {
            // The rule name is the text between the last pair of single quotes
            // preceding the `as "name!"` alias.
            let end = line.rfind('\'')?;
            let start = line[..end].rfind('\'')?;
            Some(line[start + 1..end].to_string())
        })
        .unwrap_or_else(|| "unknown".to_string())
}

fn split_queries(content: &str) -> (String, String) {
Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

as I understand it, rules are per default put into the generic category.
what happens if supabase adds a new rule to the splinter that's supabase-specific?
that might break linting in case a table isn't defined for non-supabase databases, right?
should we throw during build if there's an unknown rule in the splinter?

Copy link
Collaborator Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

great catch!

// Split the union all queries based on rule names
let queries: Vec<&str> = content.split("union all").collect();

let mut generic_queries = Vec::new();
let mut supabase_queries = Vec::new();

for query in queries {
// Extract the rule name from the query (it's the first 'name' field)
let is_supabase = SUPABASE_ONLY_RULES
.iter()
.any(|rule| query.contains(&format!("'{rule}' as \"name!\"")));

let is_generic = GENERIC_RULES
.iter()
.any(|rule| query.contains(&format!("'{rule}' as \"name!\"")));

if is_supabase {
supabase_queries.push(query);
} else if is_generic {
generic_queries.push(query);
} else {
// Extract rule name for better error message
let rule_name = extract_rule_name_from_query(query);
panic!(
"Found unknown Splinter rule that is not categorized: {rule_name:?}\n\
Please add this rule to either GENERIC_RULES or SUPABASE_ONLY_RULES in build.rs.\n\
\n\
Guidelines:\n\
- GENERIC_RULES: Rules that work on any PostgreSQL database\n\
- SUPABASE_ONLY_RULES: Rules that require Supabase infrastructure (auth schema, roles, pgrst.db_schemas)\n\
\n\
This prevents new Supabase-specific rules from breaking linting on non-Supabase databases."
);
}
}

// Join queries with "union all" and wrap in parentheses
let generic_sql = if generic_queries.is_empty() {
String::new()
} else {
generic_queries.join("union all\n")
};

let supabase_sql = if supabase_queries.is_empty() {
String::new()
} else {
supabase_queries.join("union all\n")
};

(generic_sql, supabase_sql)
}
3 changes: 2 additions & 1 deletion crates/pgls_splinter/src/convert.rs
Original file line number Diff line number Diff line change
Expand Up @@ -27,7 +27,7 @@ impl From<SplinterQueryResult> for SplinterDiagnostic {
schema,
object_name,
object_type,
remediation_url: result.remediation,
remediation: result.remediation,
additional_metadata,
},
}
Expand All @@ -47,6 +47,7 @@ fn parse_severity(level: &str) -> Severity {
/// Convert rule name and group to a Category
/// Note: Rule names use snake_case, but categories use camelCase
fn rule_name_to_category(name: &str, group: &str) -> &'static Category {
// we cannot use convert_case here because category! macro requires a string literal
match (group, name) {
("performance", "unindexed_foreign_keys") => {
category!("splinter/performance/unindexedForeignKeys")
Expand Down
6 changes: 3 additions & 3 deletions crates/pgls_splinter/src/diagnostics.rs
Original file line number Diff line number Diff line change
Expand Up @@ -41,7 +41,7 @@ pub struct SplinterAdvices {
pub object_type: Option<String>,

/// URL to documentation/remediation guide
pub remediation_url: String,
pub remediation: String,

/// Additional rule-specific metadata (e.g., fkey_name, column, indexes)
/// This contains fields that don't fit into the common structure
Expand Down Expand Up @@ -70,10 +70,10 @@ impl Advices for SplinterAdvices {
}
}

// Show remediation URL
// Show remediation
visitor.record_log(
LogCategory::Info,
&format!("Documentation: {}", &self.remediation_url),
&format!("Remediation: {}", &self.remediation),
)?;

Ok(())
Expand Down
22 changes: 12 additions & 10 deletions crates/pgls_splinter/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,7 @@ pub struct SplinterParams<'a> {
pub conn: &'a PgPool,
}

async fn check_required_roles(conn: &PgPool) -> Result<bool, sqlx::Error> {
async fn check_supabase_roles(conn: &PgPool) -> Result<bool, sqlx::Error> {
let required_roles = ["anon", "authenticated", "service_role"];

let existing_roles: Vec<String> =
Expand All @@ -32,17 +32,19 @@ async fn check_required_roles(conn: &PgPool) -> Result<bool, sqlx::Error> {
/// Run the splinter lints against the database behind `params.conn` and
/// return the resulting diagnostics.
///
/// Generic rules run unconditionally; Supabase-specific rules run only when
/// the Supabase roles are present, so plain PostgreSQL databases are linted
/// without errors.
pub async fn run_splinter(
    params: SplinterParams<'_>,
) -> Result<Vec<SplinterDiagnostic>, sqlx::Error> {
    // Generic rules work on any PostgreSQL database.
    let mut all_results = query::load_generic_splinter_results(params.conn).await?;

    // Only run Supabase-specific rules if the required roles exist.
    if check_supabase_roles(params.conn).await? {
        all_results.extend(query::load_supabase_splinter_results(params.conn).await?);
    }

    Ok(all_results.into_iter().map(Into::into).collect())
}
34 changes: 29 additions & 5 deletions crates/pgls_splinter/src/query.rs
Original file line number Diff line number Diff line change
Expand Up @@ -38,18 +38,42 @@ pub struct SplinterQueryResult {
pub cache_key: String,
}

pub async fn load_splinter_results(pool: &PgPool) -> Result<Vec<SplinterQueryResult>, sqlx::Error> {
let mut conn = pool.acquire().await?;
pub async fn load_generic_splinter_results(
pool: &PgPool,
) -> Result<Vec<SplinterQueryResult>, sqlx::Error> {
let mut tx = pool.begin().await?;

// this is done by the splinter.sql file normally, but we remove it so that sqlx can work with
// the file properly.
sqlx::query("set local search_path = ''")
.execute(&mut *conn)
.execute(&mut *tx)
.await?;

let results = sqlx::query_file_as!(SplinterQueryResult, "vendor/splinter.sql")
.fetch_all(&mut *conn)
let results = sqlx::query_file_as!(SplinterQueryResult, "vendor/splinter_generic.sql")
.fetch_all(&mut *tx)
.await?;

tx.commit().await?;

Ok(results)
}

/// Execute the Supabase-specific splinter queries and return the raw rows.
///
/// Callers are expected to gate this behind a check that the Supabase roles
/// exist, since these queries reference Supabase-only infrastructure.
pub async fn load_supabase_splinter_results(
    pool: &PgPool,
) -> Result<Vec<SplinterQueryResult>, sqlx::Error> {
    let mut tx = pool.begin().await?;

    // splinter.sql normally sets the search_path itself, but that statement is
    // stripped during vendoring so sqlx can work with the file properly;
    // re-apply it locally to this transaction instead.
    sqlx::query("set local search_path = ''")
        .execute(&mut *tx)
        .await?;

    let rows = sqlx::query_file_as!(SplinterQueryResult, "vendor/splinter_supabase.sql")
        .fetch_all(&mut *tx)
        .await?;

    tx.commit().await?;

    Ok(rows)
}
Loading