Skip to content
Merged
Show file tree
Hide file tree
Changes from 12 commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 2 additions & 0 deletions .github/workflows/pull_request.yml
Original file line number Diff line number Diff line change
Expand Up @@ -272,6 +272,8 @@ jobs:
run: cargo run -p xtask_codegen -- configuration
- name: Run the bindings codegen
run: cargo run -p xtask_codegen -- bindings
- name: Run the splinter codegen
run: cargo run -p xtask_codegen -- splinter
- name: Run the docs codegen
run: cargo run -p docs_codegen
- name: Check for git diff -- run "just ready" if you see an error
Expand Down

Large diffs are not rendered by default.

14 changes: 14 additions & 0 deletions Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

1 change: 1 addition & 0 deletions Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -82,6 +82,7 @@ pgls_query = { path = "./crates/pgls_query", version = "0.0.0"
pgls_query_ext = { path = "./crates/pgls_query_ext", version = "0.0.0" }
pgls_query_macros = { path = "./crates/pgls_query_macros", version = "0.0.0" }
pgls_schema_cache = { path = "./crates/pgls_schema_cache", version = "0.0.0" }
pgls_splinter = { path = "./crates/pgls_splinter", version = "0.0.0" }
pgls_statement_splitter = { path = "./crates/pgls_statement_splitter", version = "0.0.0" }
pgls_suppressions = { path = "./crates/pgls_suppressions", version = "0.0.0" }
pgls_text_edit = { path = "./crates/pgls_text_edit", version = "0.0.0" }
Expand Down
31 changes: 31 additions & 0 deletions crates/pgls_diagnostics_categories/src/categories.rs
Original file line number Diff line number Diff line change
Expand Up @@ -47,6 +47,30 @@ define_categories! {
"lint/safety/runningStatementWhileHoldingAccessExclusive": "https://pg-language-server.com/latest/rules/running-statement-while-holding-access-exclusive",
"lint/safety/transactionNesting": "https://pg-language-server.com/latest/rules/transaction-nesting",
// end lint rules
// splinter rules start
"splinter/performance/authRlsInitplan": "https://supabase.com/docs/guides/database/database-linter?lint=0003_auth_rls_initplan",
"splinter/performance/duplicateIndex": "https://supabase.com/docs/guides/database/database-linter?lint=0009_duplicate_index",
"splinter/performance/multiplePermissivePolicies": "https://supabase.com/docs/guides/database/database-linter?lint=0006_multiple_permissive_policies",
"splinter/performance/noPrimaryKey": "https://supabase.com/docs/guides/database/database-linter?lint=0004_no_primary_key",
"splinter/performance/tableBloat": "https://supabase.com/docs/guides/database/database-linter?lint=0020_table_bloat",
"splinter/performance/unindexedForeignKeys": "https://supabase.com/docs/guides/database/database-linter?lint=0001_unindexed_foreign_keys",
"splinter/performance/unusedIndex": "https://supabase.com/docs/guides/database/database-linter?lint=0005_unused_index",
"splinter/security/authUsersExposed": "https://supabase.com/docs/guides/database/database-linter?lint=0002_auth_users_exposed",
"splinter/security/extensionInPublic": "https://supabase.com/docs/guides/database/database-linter?lint=0014_extension_in_public",
"splinter/security/extensionVersionsOutdated": "https://supabase.com/docs/guides/database/database-linter?lint=0022_extension_versions_outdated",
"splinter/security/fkeyToAuthUnique": "https://supabase.com/docs/guides/database/database-linter?lint=0021_fkey_to_auth_unique",
"splinter/security/foreignTableInApi": "https://supabase.com/docs/guides/database/database-linter?lint=0017_foreign_table_in_api",
"splinter/security/functionSearchPathMutable": "https://supabase.com/docs/guides/database/database-linter?lint=0011_function_search_path_mutable",
"splinter/security/insecureQueueExposedInApi": "https://supabase.com/docs/guides/database/database-linter?lint=0019_insecure_queue_exposed_in_api",
"splinter/security/materializedViewInApi": "https://supabase.com/docs/guides/database/database-linter?lint=0016_materialized_view_in_api",
"splinter/security/policyExistsRlsDisabled": "https://supabase.com/docs/guides/database/database-linter?lint=0007_policy_exists_rls_disabled",
"splinter/security/rlsDisabledInPublic": "https://supabase.com/docs/guides/database/database-linter?lint=0013_rls_disabled_in_public",
"splinter/security/rlsEnabledNoPolicy": "https://supabase.com/docs/guides/database/database-linter?lint=0008_rls_enabled_no_policy",
"splinter/security/rlsReferencesUserMetadata": "https://supabase.com/docs/guides/database/database-linter?lint=0015_rls_references_user_metadata",
"splinter/security/securityDefinerView": "https://supabase.com/docs/guides/database/database-linter?lint=0010_security_definer_view",
"splinter/security/unsupportedRegTypes": "https://supabase.com/docs/guides/database/database-linter?lint=unsupported_reg_types",
"splinter/unknown/unknown": "https://pg-language-server.com/latest",
// splinter rules end
;
// General categories
"stdin",
Expand All @@ -69,4 +93,11 @@ define_categories! {
"lint/performance",
"lint/safety",
// Lint groups end

// Splinter groups start
"splinter",
"splinter/performance",
"splinter/security",
"splinter/unknown",
// Splinter groups end
}
28 changes: 28 additions & 0 deletions crates/pgls_splinter/Cargo.toml
Original file line number Diff line number Diff line change
@@ -0,0 +1,28 @@
[package]
authors.workspace = true
categories.workspace = true
description = "Supabase splinter database linter integration for the Postgres Language Server"
edition.workspace = true
homepage.workspace = true
keywords.workspace = true
license.workspace = true
name = "pgls_splinter"
repository.workspace = true
version = "0.0.0"

[dependencies]
pgls_diagnostics.workspace = true
serde.workspace = true
serde_json.workspace = true
sqlx.workspace = true

[build-dependencies]
# Used by build.rs to vendor splinter.sql from GitHub at a pinned commit.
ureq = "2.10"

[dev-dependencies]
insta.workspace = true
pgls_console.workspace = true
pgls_test_utils.workspace = true

[lib]
doctest = false
1 change: 1 addition & 0 deletions crates/pgls_splinter/TODO.md
Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

personal note?

Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
- Implement `ignore` / `include` options and configuration support; try to refactor the existing analyser infrastructure so it can be re-used here.
109 changes: 109 additions & 0 deletions crates/pgls_splinter/build.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,109 @@
use std::env;
use std::fs;
use std::path::Path;

// Update this commit SHA to pull in a new version of splinter.sql
const SPLINTER_COMMIT_SHA: &str = "27ea2ece65464213e466cd969cc61b6940d16219";

/// Build script: vendors `splinter.sql` (pinned to `SPLINTER_COMMIT_SHA`)
/// into `vendor/`, downloading and re-processing it only when the vendored
/// copy is missing or was produced from a different commit.
fn main() {
    // Resolve paths inside this crate's source tree (not OUT_DIR, so the
    // vendored file can be committed and inspected).
    let manifest_dir = env::var("CARGO_MANIFEST_DIR").unwrap();
    let vendor_dir = Path::new(&manifest_dir).join("vendor");
    let sql_file = vendor_dir.join("splinter.sql");
    let sha_file = vendor_dir.join("COMMIT_SHA.txt");

    if !vendor_dir.exists() {
        fs::create_dir_all(&vendor_dir).expect("Failed to create vendor directory");
    }

    // The vendored copy is current only when both files exist and the
    // recorded SHA matches the pinned constant.
    let up_to_date = sql_file.exists()
        && sha_file.exists()
        && fs::read_to_string(&sha_file)
            .expect("Failed to read COMMIT_SHA.txt")
            .trim()
            == SPLINTER_COMMIT_SHA;

    if !up_to_date {
        println!(
            "cargo:warning=Downloading splinter.sql from GitHub (commit: {SPLINTER_COMMIT_SHA})"
        );
        download_and_process_sql(&sql_file);
        fs::write(&sha_file, SPLINTER_COMMIT_SHA).expect("Failed to write COMMIT_SHA.txt");
    }

    // Re-run when the script itself or the recorded SHA changes.
    println!("cargo:rerun-if-changed=build.rs");
    println!("cargo:rerun-if-changed=vendor/COMMIT_SHA.txt");
}

/// Download `splinter.sql` at the pinned commit, strip its
/// `SET LOCAL search_path` statement, annotate column aliases for sqlx
/// non-null checking, and write the result to `dest_path`.
fn download_and_process_sql(dest_path: &Path) {
    let url = format!(
        "https://raw.githubusercontent.com/supabase/splinter/{SPLINTER_COMMIT_SHA}/splinter.sql"
    );

    // Fetch the raw SQL; any failure here should abort the build loudly.
    let raw = ureq::get(&url)
        .call()
        .expect("Failed to download splinter.sql")
        .into_string()
        .expect("Failed to read response body");

    // Two-step post-processing: drop the search_path override, then mark
    // the known output columns as non-null for sqlx's compile-time checks.
    let processed = add_not_null_markers(&remove_set_search_path(&raw));

    fs::write(dest_path, processed).expect("Failed to write splinter.sql");

    println!("cargo:warning=Successfully downloaded and processed splinter.sql");
}

/// Remove every line that begins (case-insensitively, after trimming) with
/// `set local search_path`, keeping all other lines joined by `\n`.
fn remove_set_search_path(content: &str) -> String {
    let kept: Vec<&str> = content
        .lines()
        .filter(|line| {
            // Compare against a lowercased, trimmed copy so casing and
            // leading whitespace don't matter.
            !line.trim().to_lowercase().starts_with("set local search_path")
        })
        .collect();
    kept.join("\n")
}

/// Add a `"!"` suffix to known column aliases so sqlx treats them as non-null.
///
/// Transforms patterns like `'value' as name` into `'value' as "name!"`.
///
/// Unlike a plain substring replace, this only rewrites an alias when it is a
/// whole identifier: ` as name,` is rewritten, but ` as names` or
/// ` as name_of_thing` is left untouched, since those are different aliases
/// and a blind replace would corrupt them (e.g. ` as "name!"s`).
fn add_not_null_markers(content: &str) -> String {
    let columns_to_mark = [
        "name",
        "title",
        "level",
        "facing",
        "categories",
        "description",
        "detail",
        "remediation",
        "metadata",
        "cache_key",
    ];

    let mut result = content.to_string();

    for column in &columns_to_mark {
        let pattern = format!(" as {column}");
        let replacement = format!(" as \"{column}!\"");

        // Manual scan-and-rebuild so we can check the character following
        // each match: only replace when the alias ends there (end of input
        // or a non-identifier character).
        let mut rebuilt = String::with_capacity(result.len());
        let mut rest = result.as_str();
        while let Some(idx) = rest.find(&pattern) {
            let after = &rest[idx + pattern.len()..];
            let at_boundary = after
                .chars()
                .next()
                .map_or(true, |c| !c.is_alphanumeric() && c != '_');

            rebuilt.push_str(&rest[..idx]);
            rebuilt.push_str(if at_boundary { &replacement } else { &pattern });
            rest = after;
        }
        rebuilt.push_str(rest);
        result = rebuilt;
    }

    result
}
136 changes: 136 additions & 0 deletions crates/pgls_splinter/src/convert.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,136 @@
use pgls_diagnostics::{Category, Severity, category};
use serde_json::Value;

use crate::{SplinterAdvices, SplinterDiagnostic, SplinterQueryResult};

impl From<SplinterQueryResult> for SplinterDiagnostic {
    /// Convert a raw splinter query row into a diagnostic: the JSON metadata
    /// is split into well-known fields plus a free-form remainder, and the
    /// first reported category (lowercased) selects the rule group.
    fn from(result: SplinterQueryResult) -> Self {
        let (schema, object_name, object_type, additional_metadata) =
            extract_metadata_fields(&result.metadata);

        // Splinter can report several categories; for now only the first one
        // is used as the group, falling back to "unknown".
        let group = match result.categories.first() {
            Some(category) => category.to_lowercase(),
            None => "unknown".to_string(),
        };

        Self {
            category: rule_name_to_category(&result.name, &group),
            message: result.detail.into(),
            severity: parse_severity(&result.level),
            advices: SplinterAdvices {
                description: result.description,
                schema,
                object_name,
                object_type,
                remediation_url: result.remediation,
                additional_metadata,
            },
        }
    }
}

/// Parse severity level from the query result
fn parse_severity(level: &str) -> Severity {
match level {
"INFO" => Severity::Information,
"WARN" => Severity::Warning,
"ERROR" => Severity::Error,
_ => Severity::Information, // default to info
}
}

/// Convert rule name and group to a Category
/// Note: Rule names use snake_case, but categories use camelCase
///
/// The `(group, name)` pairs here must stay in sync with the entries between
/// the "splinter rules start/end" markers in
/// `pgls_diagnostics_categories/src/categories.rs`; an unmatched pair falls
/// through to the `splinter/unknown/unknown` catch-all category.
fn rule_name_to_category(name: &str, group: &str) -> &'static Category {
    match (group, name) {
        ("performance", "unindexed_foreign_keys") => {
            category!("splinter/performance/unindexedForeignKeys")
        }
        ("performance", "auth_rls_initplan") => {
            category!("splinter/performance/authRlsInitplan")
        }
        ("performance", "no_primary_key") => category!("splinter/performance/noPrimaryKey"),
        ("performance", "unused_index") => category!("splinter/performance/unusedIndex"),
        ("performance", "duplicate_index") => category!("splinter/performance/duplicateIndex"),
        ("performance", "table_bloat") => category!("splinter/performance/tableBloat"),
        ("performance", "multiple_permissive_policies") => {
            category!("splinter/performance/multiplePermissivePolicies")
        }
        ("security", "auth_users_exposed") => category!("splinter/security/authUsersExposed"),
        ("security", "extension_versions_outdated") => {
            category!("splinter/security/extensionVersionsOutdated")
        }
        ("security", "policy_exists_rls_disabled") => {
            category!("splinter/security/policyExistsRlsDisabled")
        }
        ("security", "rls_enabled_no_policy") => {
            category!("splinter/security/rlsEnabledNoPolicy")
        }
        ("security", "security_definer_view") => {
            category!("splinter/security/securityDefinerView")
        }
        ("security", "function_search_path_mutable") => {
            category!("splinter/security/functionSearchPathMutable")
        }
        ("security", "rls_disabled_in_public") => {
            category!("splinter/security/rlsDisabledInPublic")
        }
        ("security", "extension_in_public") => category!("splinter/security/extensionInPublic"),
        ("security", "rls_references_user_metadata") => {
            category!("splinter/security/rlsReferencesUserMetadata")
        }
        ("security", "materialized_view_in_api") => {
            category!("splinter/security/materializedViewInApi")
        }
        ("security", "foreign_table_in_api") => {
            category!("splinter/security/foreignTableInApi")
        }
        ("security", "unsupported_reg_types") => {
            category!("splinter/security/unsupportedRegTypes")
        }
        ("security", "insecure_queue_exposed_in_api") => {
            category!("splinter/security/insecureQueueExposedInApi")
        }
        ("security", "fkey_to_auth_unique") => category!("splinter/security/fkeyToAuthUnique"),
        // Unrecognized rule or group: keep the diagnostic rather than drop it.
        _ => category!("splinter/unknown/unknown"),
    }
}

/// Split splinter's JSON metadata into the well-known fields
/// (`schema`, `name`, `type`) and whatever remains.
///
/// Returns `(schema, object_name, object_type, additional_metadata)`.
/// If the metadata is not a JSON object, the raw value is passed through
/// untouched as `additional_metadata` so nothing is silently dropped.
fn extract_metadata_fields(
    metadata: &Value,
) -> (
    Option<String>,
    Option<String>,
    Option<String>,
    Option<Value>,
) {
    let obj = match metadata.as_object() {
        Some(obj) => obj,
        None => return (None, None, None, Some(metadata.clone())),
    };

    // Pull a string-valued key out of the object, if present.
    let as_string = |key: &str| obj.get(key).and_then(Value::as_str).map(String::from);

    let schema = as_string("schema");
    let object_name = as_string("name");
    let object_type = as_string("type");

    // Everything beyond the well-known keys is preserved verbatim.
    let mut rest = obj.clone();
    rest.remove("schema");
    rest.remove("name");
    rest.remove("type");

    let additional_metadata = if rest.is_empty() {
        None
    } else {
        Some(Value::Object(rest))
    };

    (schema, object_name, object_type, additional_metadata)
}
Loading