diff --git a/Cargo.lock b/Cargo.lock
index 4da985a3..16b1de5e 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -2693,6 +2693,7 @@ dependencies = [
  "pgt_console",
  "pgt_diagnostics",
  "pgt_query_ext",
+ "pgt_schema_cache",
  "pgt_text_size",
  "rustc-hash 2.1.0",
  "schemars",
@@ -2708,7 +2709,9 @@ dependencies = [
  "pgt_console",
  "pgt_diagnostics",
  "pgt_query_ext",
+ "pgt_schema_cache",
  "pgt_test_macros",
+ "pgt_text_size",
  "serde",
  "termcolor",
 ]
@@ -3102,6 +3105,7 @@ dependencies = [
  "pgt_schema_cache",
  "pgt_statement_splitter",
  "pgt_suppressions",
+ "pgt_test_utils",
  "pgt_text_size",
  "pgt_typecheck",
  "rustc-hash 2.1.0",
diff --git a/crates/pgt_analyse/Cargo.toml b/crates/pgt_analyse/Cargo.toml
index 75eb0211..4d30784c 100644
--- a/crates/pgt_analyse/Cargo.toml
+++ b/crates/pgt_analyse/Cargo.toml
@@ -13,10 +13,11 @@ version = "0.0.0"
 
 [dependencies]
-pgt_console.workspace     = true
-pgt_diagnostics.workspace = true
-pgt_query_ext.workspace   = true
-rustc-hash                = { workspace = true }
+pgt_console.workspace      = true
+pgt_diagnostics.workspace  = true
+pgt_query_ext.workspace    = true
+pgt_schema_cache.workspace = true
+rustc-hash                 = { workspace = true }
 
 biome_deserialize        = { workspace = true, optional = true }
 biome_deserialize_macros = { workspace = true, optional = true }
diff --git a/crates/pgt_analyse/src/analysed_file_context.rs b/crates/pgt_analyse/src/analysed_file_context.rs
new file mode 100644
index 00000000..cba53eeb
--- /dev/null
+++ b/crates/pgt_analyse/src/analysed_file_context.rs
@@ -0,0 +1,7 @@
+#[derive(Default)]
+pub struct AnalysedFileContext {}
+
+impl AnalysedFileContext {
+    #[allow(unused)]
+    pub fn update_from(&mut self, stmt_root: &pgt_query_ext::NodeEnum) {}
+}
diff --git a/crates/pgt_analyse/src/context.rs b/crates/pgt_analyse/src/context.rs
index cd069657..7447c0bb 100644
--- a/crates/pgt_analyse/src/context.rs
+++ b/crates/pgt_analyse/src/context.rs
@@ -1,4 +1,7 @@
+use pgt_schema_cache::SchemaCache;
+
 use crate::{
+    AnalysedFileContext,
     categories::RuleCategory,
     rule::{GroupCategory, Rule, RuleGroup, RuleMetadata},
 };
@@ -6,6 +9,8 @@ use crate::{
 pub struct RuleContext<'a, R: Rule> {
     stmt: &'a pgt_query_ext::NodeEnum,
     options: &'a R::Options,
+    schema_cache: Option<&'a SchemaCache>,
+    file_context: &'a AnalysedFileContext,
 }
 
 impl<'a, R> RuleContext<'a, R>
@@ -13,8 +18,18 @@ where
     R: Rule + Sized + 'static,
 {
     #[allow(clippy::too_many_arguments)]
-    pub fn new(stmt: &'a pgt_query_ext::NodeEnum, options: &'a R::Options) -> Self {
-        Self { stmt, options }
+    pub fn new(
+        stmt: &'a pgt_query_ext::NodeEnum,
+        options: &'a R::Options,
+        schema_cache: Option<&'a SchemaCache>,
+        file_context: &'a AnalysedFileContext,
+    ) -> Self {
+        Self {
+            stmt,
+            options,
+            schema_cache,
+            file_context,
+        }
     }
 
     /// Returns the group that belongs to the current rule
@@ -32,6 +47,14 @@ where
         self.stmt
     }
 
+    pub fn file_context(&self) -> &AnalysedFileContext {
+        self.file_context
+    }
+
+    pub fn schema_cache(&self) -> Option<&SchemaCache> {
+        self.schema_cache
+    }
+
     /// Returns the metadata of the rule
     ///
     /// The metadata contains information about the rule, such as the name, version, language, and whether it is recommended.
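With these accessors, `RuleContext` now hands rule authors two extra capabilities. A minimal sketch of how a rule body can use them, assuming the workspace crates; `check_statement` and `schema_aware_check` are invented helpers, not part of this diff:

```rust
use pgt_analyse::{Rule, RuleDiagnostic, context::RuleContext};
use pgt_schema_cache::SchemaCache;

// Sketch: a rule body that degrades gracefully without a connection.
fn check_statement<R: Rule>(ctx: &RuleContext<R>) -> Vec<RuleDiagnostic> {
    // Per-file state accumulated from previously analysed statements.
    let _file_context = ctx.file_context();

    match ctx.schema_cache() {
        // With a live connection, catalog-aware checks become possible.
        Some(cache) => schema_aware_check(ctx.stmt(), cache),
        // Without one, fall back to purely syntactic checks.
        None => Vec::new(),
    }
}

fn schema_aware_check(
    _stmt: &pgt_query_ext::NodeEnum,
    _cache: &SchemaCache,
) -> Vec<RuleDiagnostic> {
    Vec::new()
}
```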
diff --git a/crates/pgt_analyse/src/lib.rs b/crates/pgt_analyse/src/lib.rs
index f312de45..1d4ec6ae 100644
--- a/crates/pgt_analyse/src/lib.rs
+++ b/crates/pgt_analyse/src/lib.rs
@@ -1,3 +1,4 @@
+mod analysed_file_context;
 mod categories;
 pub mod context;
 mod filter;
@@ -9,6 +10,7 @@ mod rule;
 // Re-exported for use in the `declare_group` macro
 pub use pgt_diagnostics::category_concat;
 
+pub use crate::analysed_file_context::AnalysedFileContext;
 pub use crate::categories::{
     ActionCategory, RefactorKind, RuleCategories, RuleCategoriesBuilder, RuleCategory,
     SUPPRESSION_ACTION_CATEGORY, SourceActionKind,
diff --git a/crates/pgt_analyse/src/registry.rs b/crates/pgt_analyse/src/registry.rs
index 48b73b15..d43d7711 100644
--- a/crates/pgt_analyse/src/registry.rs
+++ b/crates/pgt_analyse/src/registry.rs
@@ -2,6 +2,7 @@ use std::{borrow, collections::BTreeSet};
 
 use crate::{
     AnalyserOptions,
+    analysed_file_context::AnalysedFileContext,
     context::RuleContext,
     filter::{AnalysisFilter, GroupKey, RuleKey},
     rule::{GroupCategory, Rule, RuleDiagnostic, RuleGroup},
@@ -158,6 +159,8 @@ impl RuleRegistry {
 pub struct RegistryRuleParams<'a> {
     pub root: &'a pgt_query_ext::NodeEnum,
     pub options: &'a AnalyserOptions,
+    pub analysed_file_context: &'a AnalysedFileContext,
+    pub schema_cache: Option<&'a pgt_schema_cache::SchemaCache>,
 }
 
 /// Executor for rule as a generic function pointer
@@ -174,7 +177,14 @@ impl RegistryRule {
         R: Rule + 'static,
     {
         let options = params.options.rule_options::<R>().unwrap_or_default();
-        let ctx = RuleContext::new(params.root, &options);
+
+        let ctx = RuleContext::new(
+            params.root,
+            &options,
+            params.schema_cache,
+            params.analysed_file_context,
+        );
+
         R::run(&ctx)
     }
diff --git a/crates/pgt_analyse/src/rule.rs b/crates/pgt_analyse/src/rule.rs
index fae3cda3..1760ce97 100644
--- a/crates/pgt_analyse/src/rule.rs
+++ b/crates/pgt_analyse/src/rule.rs
@@ -102,7 +102,8 @@ pub trait GroupCategory {
 pub trait Rule: RuleMeta + Sized {
     type Options: Default + Clone + Debug;
 
-    fn run(ctx: &RuleContext<Self>) -> Vec<RuleDiagnostic>;
+    /// `schema_cache` will only be available if the user has a working database connection.
+    fn run(rule_context: &RuleContext<Self>) -> Vec<RuleDiagnostic>;
 }
 
 /// Diagnostic object returned by a single analysis rule
@@ -208,6 +209,12 @@ impl RuleDiagnostic {
         self
     }
 
+    /// Sets the span of this diagnostic.
+    pub fn span(mut self, span: TextRange) -> Self {
+        self.span = Some(span);
+        self
+    }
+
     /// Marks this diagnostic as unnecessary code, which will
     /// be displayed in the language server.
     ///
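The new `RuleDiagnostic::span` builder is what lets the analyser (see `Analyser::run` below) stamp each diagnostic with the absolute range of the statement it came from. A small sketch of the call shape, assuming the workspace crates; `anchor` is a hypothetical helper:

```rust
use pgt_analyse::RuleDiagnostic;
use pgt_text_size::TextRange;

// Sketch: re-anchor a rule diagnostic to the statement it came from,
// mirroring `.map(|r| r.span(stmt.range))` in the new `Analyser::run`.
fn anchor(diag: RuleDiagnostic, stmt_range: TextRange) -> RuleDiagnostic {
    diag.span(stmt_range)
}
```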
diff --git a/crates/pgt_analyser/Cargo.toml b/crates/pgt_analyser/Cargo.toml
index e77aaa4f..5f65b978 100644
--- a/crates/pgt_analyser/Cargo.toml
+++ b/crates/pgt_analyser/Cargo.toml
@@ -12,11 +12,13 @@ repository.workspace = true
 version = "0.0.0"
 
 [dependencies]
-pgt_analyse     = { workspace = true }
-pgt_console     = { workspace = true }
-pgt_diagnostics = { workspace = true }
-pgt_query_ext   = { workspace = true }
-serde           = { workspace = true }
+pgt_analyse      = { workspace = true }
+pgt_console      = { workspace = true }
+pgt_diagnostics  = { workspace = true }
+pgt_query_ext    = { workspace = true }
+pgt_schema_cache = { workspace = true }
+pgt_text_size    = { workspace = true }
+serde            = { workspace = true }
 
 [dev-dependencies]
 insta = { version = "1.42.1" }
diff --git a/crates/pgt_analyser/src/lib.rs b/crates/pgt_analyser/src/lib.rs
index 248fe22b..f96b6f6d 100644
--- a/crates/pgt_analyser/src/lib.rs
+++ b/crates/pgt_analyser/src/lib.rs
@@ -1,8 +1,8 @@
 use std::{ops::Deref, sync::LazyLock};
 
 use pgt_analyse::{
-    AnalyserOptions, AnalysisFilter, MetadataRegistry, RegistryRuleParams, RuleDiagnostic,
-    RuleRegistry,
+    AnalysedFileContext, AnalyserOptions, AnalysisFilter, MetadataRegistry, RegistryRuleParams,
+    RuleDiagnostic, RuleRegistry,
 };
 pub use registry::visit_registry;
 
@@ -30,8 +30,15 @@ pub struct Analyser<'a> {
     registry: RuleRegistry,
 }
 
-pub struct AnalyserContext<'a> {
-    pub root: &'a pgt_query_ext::NodeEnum,
+#[derive(Debug)]
+pub struct AnalysableStatement {
+    pub root: pgt_query_ext::NodeEnum,
+    pub range: pgt_text_size::TextRange,
+}
+
+pub struct AnalyserParams<'a> {
+    pub stmts: Vec<AnalysableStatement>,
+    pub schema_cache: Option<&'a pgt_schema_cache::SchemaCache>,
 }
 
 pub struct AnalyserConfig<'a> {
@@ -52,17 +59,31 @@ impl<'a> Analyser<'a> {
         }
     }
 
-    pub fn run(&self, ctx: AnalyserContext) -> Vec<RuleDiagnostic> {
-        let params = RegistryRuleParams {
-            root: ctx.root,
-            options: self.options,
-        };
+    pub fn run(&self, params: AnalyserParams) -> Vec<RuleDiagnostic> {
+        let mut diagnostics = vec![];
+
+        let mut file_context = AnalysedFileContext::default();
+
+        for stmt in params.stmts {
+            let rule_params = RegistryRuleParams {
+                root: &stmt.root,
+                options: self.options,
+                analysed_file_context: &file_context,
+                schema_cache: params.schema_cache,
+            };
 
-        self.registry
-            .rules
-            .iter()
-            .flat_map(|rule| (rule.run)(&params))
-            .collect::<Vec<_>>()
+            diagnostics.extend(
+                self.registry
+                    .rules
+                    .iter()
+                    .flat_map(|rule| (rule.run)(&rule_params))
+                    .map(|r| r.span(stmt.range)),
+            );
+
+            file_context.update_from(&stmt.root);
+        }
+
+        diagnostics
     }
 }
 
@@ -77,9 +98,10 @@ mod tests {
         markup,
     };
     use pgt_diagnostics::PrintDiagnostic;
+    use pgt_text_size::TextRange;
     use termcolor::NoColor;
 
-    use crate::Analyser;
+    use crate::{AnalysableStatement, Analyser};
 
     #[ignore]
     #[test]
@@ -102,6 +124,7 @@ mod tests {
         };
 
         let ast = pgt_query_ext::parse(SQL).expect("failed to parse SQL");
+        let range = TextRange::new(0.into(), u32::try_from(SQL.len()).unwrap().into());
 
         let options = AnalyserOptions::default();
 
@@ -110,7 +133,10 @@ mod tests {
             filter,
         });
 
-        let results = analyser.run(crate::AnalyserContext { root: &ast });
+        let results = analyser.run(crate::AnalyserParams {
+            stmts: vec![AnalysableStatement { root: ast, range }],
+            schema_cache: None,
+        });
 
         println!("*******************");
         for result in &results {
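With `AnalyserContext` replaced by `AnalyserParams`, callers hand the analyser every statement of a file in one call, so the per-file context can accumulate between statements. A condensed sketch of the new calling convention, mirroring the updated `rules_check` and docs codegen callers; `lint_file` is a hypothetical wrapper:

```rust
use pgt_analyse::RuleDiagnostic;
use pgt_analyser::{AnalysableStatement, Analyser, AnalyserParams};

// Sketch: lint a whole file in one pass so per-file context accumulates.
fn lint_file(code: &str, analyser: &Analyser) -> Vec<RuleDiagnostic> {
    let split = pgt_statement_splitter::split(code);

    let stmts = split
        .ranges
        .into_iter()
        .filter_map(|range| {
            // Statements that fail to parse are skipped here; real callers
            // surface the syntax error as its own diagnostic instead.
            pgt_query_ext::parse(&code[range])
                .ok()
                .map(|root| AnalysableStatement { root, range })
        })
        .collect();

    // No database connection in this sketch, so rules see no schema cache.
    analyser.run(AnalyserParams {
        stmts,
        schema_cache: None,
    })
}
```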
diff --git a/crates/pgt_analyser/tests/rules_tests.rs b/crates/pgt_analyser/tests/rules_tests.rs
index 247c02b0..0a6b47ec 100644
--- a/crates/pgt_analyser/tests/rules_tests.rs
+++ b/crates/pgt_analyser/tests/rules_tests.rs
@@ -2,7 +2,7 @@ use core::slice;
 use std::{fmt::Write, fs::read_to_string, path::Path};
 
 use pgt_analyse::{AnalyserOptions, AnalysisFilter, RuleDiagnostic, RuleFilter};
-use pgt_analyser::{Analyser, AnalyserConfig, AnalyserContext};
+use pgt_analyser::{AnalysableStatement, Analyser, AnalyserConfig, AnalyserParams};
 use pgt_console::StdDisplay;
 use pgt_diagnostics::PrintDiagnostic;
 
@@ -32,7 +32,15 @@ fn rule_test(full_path: &'static str, _: &str, _: &str) {
         filter,
     });
 
-    let results = analyser.run(AnalyserContext { root: &ast });
+    let stmt = AnalysableStatement {
+        root: ast,
+        range: pgt_text_size::TextRange::new(0.into(), u32::try_from(query.len()).unwrap().into()),
+    };
+
+    let results = analyser.run(AnalyserParams {
+        stmts: vec![stmt],
+        schema_cache: None,
+    });
 
     let mut snapshot = String::new();
     write_snapshot(&mut snapshot, query.as_str(), results.as_slice());
diff --git a/crates/pgt_lsp/tests/server.rs b/crates/pgt_lsp/tests/server.rs
index 353e80ae..176868f5 100644
--- a/crates/pgt_lsp/tests/server.rs
+++ b/crates/pgt_lsp/tests/server.rs
@@ -1734,13 +1734,24 @@ $$;
 
     server.open_document(initial_content).await?;
 
-    let notification = tokio::time::timeout(Duration::from_secs(5), async {
+    let got_notification = tokio::time::timeout(Duration::from_secs(5), async {
         loop {
             match receiver.next().await {
                 Some(ServerNotification::PublishDiagnostics(msg)) => {
                     if msg.diagnostics.iter().any(|d| {
                         d.message
                             .contains("Invalid statement: syntax error at or near \"declre\"")
+                            && d.range
+                                == Range {
+                                    start: Position {
+                                        line: 5,
+                                        character: 9,
+                                    },
+                                    end: Position {
+                                        line: 11,
+                                        character: 0,
+                                    },
+                                }
                     }) {
                         return true;
                     }
@@ -1752,7 +1763,10 @@ $$;
     .await
     .is_ok();
 
-    assert!(notification, "expected diagnostics for unknown column");
+    assert!(
+        got_notification,
+        "expected diagnostics for invalid declare statement"
+    );
 
     server.shutdown().await?;
     reader.abort();
diff --git a/crates/pgt_query_ext/src/diagnostics.rs b/crates/pgt_query_ext/src/diagnostics.rs
index 7e3f0a37..1a068dc0 100644
--- a/crates/pgt_query_ext/src/diagnostics.rs
+++ b/crates/pgt_query_ext/src/diagnostics.rs
@@ -9,7 +9,7 @@ use pgt_text_size::TextRange;
 pub struct SyntaxDiagnostic {
     /// The location where the error occurred
     #[location(span)]
-    span: Option<TextRange>,
+    pub span: Option<TextRange>,
     #[message]
     #[description]
     pub message: MessageAndDescription,
@@ -23,6 +23,11 @@ impl SyntaxDiagnostic {
             message: MessageAndDescription::from(message.into()),
         }
     }
+
+    pub fn span(mut self, span: TextRange) -> Self {
+        self.span = Some(span);
+        self
+    }
 }
 
 impl From<pg_query::Error> for SyntaxDiagnostic {
diff --git a/crates/pgt_workspace/Cargo.toml b/crates/pgt_workspace/Cargo.toml
index f535e505..3ef4936b 100644
--- a/crates/pgt_workspace/Cargo.toml
+++ b/crates/pgt_workspace/Cargo.toml
@@ -62,7 +62,9 @@ schema = [
 ]
 
 [dev-dependencies]
-tempfile = "3.15.0"
+pgt_test_utils = { workspace = true }
+sqlx           = { workspace = true }
+tempfile       = "3.15.0"
 
 [lib]
doctest = false
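Making `SyntaxDiagnostic::span` public and adding the builder keeps re-anchoring terse at call sites; `AnalyserDiagnosticsMapper` later in this diff relies on exactly this pattern. A hedged sketch with a hypothetical helper name:

```rust
use pgt_query_ext::diagnostics::SyntaxDiagnostic;
use pgt_text_size::TextRange;

// Sketch: a nested (e.g. PL/pgSQL) diagnostic carries a span relative to
// its statement; shift it by the statement's start to make it absolute.
fn anchor_to_statement(diag: SyntaxDiagnostic, stmt_range: TextRange) -> SyntaxDiagnostic {
    let absolute = diag
        .span
        .map(|sp| sp + stmt_range.start())
        .unwrap_or(stmt_range);
    diag.span(absolute)
}
```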
diff --git a/crates/pgt_workspace/src/workspace/server.rs b/crates/pgt_workspace/src/workspace/server.rs
index 399f2ec6..e6456afc 100644
--- a/crates/pgt_workspace/src/workspace/server.rs
+++ b/crates/pgt_workspace/src/workspace/server.rs
@@ -10,12 +10,12 @@ use analyser::AnalyserVisitorBuilder;
 use async_helper::run_async;
 use connection_manager::ConnectionManager;
 use document::{
-    AsyncDiagnosticsMapper, CursorPositionFilter, DefaultMapper, Document, ExecuteStatementMapper,
-    SyncDiagnosticsMapper,
+    CursorPositionFilter, DefaultMapper, Document, ExecuteStatementMapper,
+    TypecheckDiagnosticsMapper,
 };
 use futures::{StreamExt, stream};
 use pgt_analyse::{AnalyserOptions, AnalysisFilter};
-use pgt_analyser::{Analyser, AnalyserConfig, AnalyserContext};
+use pgt_analyser::{Analyser, AnalyserConfig, AnalyserParams};
 use pgt_diagnostics::{
     Diagnostic, DiagnosticExt, Error, Severity, serde::Diagnostic as SDiagnostic,
 };
@@ -37,6 +37,7 @@ use crate::{
         diagnostics::{PullDiagnosticsParams, PullDiagnosticsResult},
     },
     settings::{WorkspaceSettings, WorkspaceSettingsHandle, WorkspaceSettingsHandleMut},
+    workspace::AnalyserDiagnosticsMapper,
 };
 
 use super::{
@@ -444,7 +445,7 @@ impl Workspace for WorkspaceServer {
         if let Some(pool) = self.get_current_connection() {
             let path_clone = params.path.clone();
             let schema_cache = self.schema_cache.load(pool.clone())?;
-            let input = doc.iter(AsyncDiagnosticsMapper).collect::<Vec<_>>();
+            let input = doc.iter(TypecheckDiagnosticsMapper).collect::<Vec<_>>();
             // sorry for the ugly code :(
             let async_results = run_async(async move {
                 stream::iter(input)
@@ -527,50 +528,49 @@ impl Workspace for WorkspaceServer {
             filter,
         });
 
+        let path = params.path.as_path().display().to_string();
+
+        let schema_cache = self
+            .get_current_connection()
+            .and_then(|pool| self.schema_cache.load(pool.clone()).ok());
+
+        let mut analysable_stmts = vec![];
+        for (stmt_root, diagnostic) in doc.iter(AnalyserDiagnosticsMapper) {
+            if let Some(node) = stmt_root {
+                analysable_stmts.push(node);
+            }
+            if let Some(diag) = diagnostic {
+                diagnostics.push(SDiagnostic::new(
+                    diag.with_file_path(path.clone())
+                        .with_severity(Severity::Error),
+                ));
+            }
+        }
+
         diagnostics.extend(
-            doc.iter(SyncDiagnosticsMapper)
-                .flat_map(|(range, ast, diag)| {
-                    let mut errors: Vec<Error> = vec![];
-
-                    if let Some(diag) = diag {
-                        errors.push(diag.into());
-                    }
-
-                    if let Some(ast) = ast {
-                        errors.extend(
-                            analyser
-                                .run(AnalyserContext { root: &ast })
-                                .into_iter()
-                                .map(Error::from)
-                                .collect::<Vec<_>>(),
-                        );
-                    }
-
-                    errors
-                        .into_iter()
-                        .map(|d| {
-                            let severity = d
-                                .category()
-                                .filter(|category| category.name().starts_with("lint/"))
-                                .map_or_else(
-                                    || d.severity(),
-                                    |category| {
-                                        settings
-                                            .get_severity_from_rule_code(category)
-                                            .unwrap_or(Severity::Warning)
-                                    },
-                                );
-
-                            // adjust the span of the diagnostics to the statement (if it has one)
-                            let span = d.location().span.map(|s| s + range.start());
-
-                            SDiagnostic::new(
-                                d.with_file_path(params.path.as_path().display().to_string())
-                                    .with_file_span(span.unwrap_or(range))
-                                    .with_severity(severity),
-                            )
-                        })
-                        .collect::<Vec<_>>()
+            analyser
+                .run(AnalyserParams {
+                    stmts: analysable_stmts,
+                    schema_cache: schema_cache.as_deref(),
+                })
+                .into_iter()
+                .map(Error::from)
+                .map(|d| {
+                    let severity = d
+                        .category()
+                        .map(|category| {
+                            settings
+                                .get_severity_from_rule_code(category)
+                                .unwrap_or(Severity::Warning)
+                        })
+                        .unwrap();
+
+                    let span = d.location().span;
+                    SDiagnostic::new(
+                        d.with_file_path(path.clone())
+                            .with_file_span(span)
+                            .with_severity(severity),
+                    )
                 }),
         );
 
@@ -655,3 +655,7 @@ impl Workspace for WorkspaceServer {
 fn is_dir(path: &Path) -> bool {
     path.is_dir() || (path.is_symlink() && fs::read_link(path).is_ok_and(|path| path.is_dir()))
 }
+
+#[cfg(test)]
+#[path = "server.tests.rs"]
+mod tests;
diff --git a/crates/pgt_workspace/src/workspace/server.tests.rs b/crates/pgt_workspace/src/workspace/server.tests.rs
new file mode 100644
index 00000000..94b02cd5
--- /dev/null
+++ b/crates/pgt_workspace/src/workspace/server.tests.rs
@@ -0,0 +1,99 @@
+use biome_deserialize::Merge;
+use pgt_analyse::RuleCategories;
+use pgt_configuration::{PartialConfiguration, database::PartialDatabaseConfiguration};
+use pgt_diagnostics::Diagnostic;
+use pgt_fs::PgTPath;
+use pgt_text_size::TextRange;
+use sqlx::PgPool;
+
+use crate::{
+    Workspace, WorkspaceError,
+    workspace::{
+        OpenFileParams, RegisterProjectFolderParams, UpdateSettingsParams, server::WorkspaceServer,
+    },
+};
+
+fn get_test_workspace(
+    partial_config: Option<PartialConfiguration>,
+) -> Result<WorkspaceServer, WorkspaceError> {
+    let workspace = WorkspaceServer::new();
+
+    workspace.register_project_folder(RegisterProjectFolderParams {
+        path: None,
+        set_as_current_workspace: true,
+    })?;
+
+    workspace.update_settings(UpdateSettingsParams {
+        configuration: partial_config.unwrap_or(PartialConfiguration::init()),
+        gitignore_matches: vec![],
+        vcs_base_path: None,
+        workspace_directory: None,
+    })?;
+
+    Ok(workspace)
+}
+
+#[sqlx::test(migrator = "pgt_test_utils::MIGRATIONS")]
+async fn test_diagnostics(test_db: PgPool) {
+    let mut conf = PartialConfiguration::init();
+    conf.merge_with(PartialConfiguration {
+        db: Some(PartialDatabaseConfiguration {
+            database: Some(
+                test_db
+                    .connect_options()
+                    .get_database()
+                    .unwrap()
+                    .to_string(),
+            ),
+            ..Default::default()
+        }),
+        ..Default::default()
+    });
+
+    let workspace = get_test_workspace(Some(conf)).expect("Unable to create test workspace");
+
+    let path = PgTPath::new("test.sql");
+    let content = r#"
+      create table users (
+          id serial primary key,
+          name text not null
+      );
+
+      drop table non_existing_table;
+
+      select 1;
+    "#;
+
+    workspace
+        .open_file(OpenFileParams {
+            path: path.clone(),
+            content: content.into(),
+            version: 1,
+        })
+        .expect("Unable to open test file");
+
+    let diagnostics = workspace
+        .pull_diagnostics(crate::workspace::PullDiagnosticsParams {
+            path: path.clone(),
+            categories: RuleCategories::all(),
+            max_diagnostics: 100,
+            only: vec![],
+            skip: vec![],
+        })
+        .expect("Unable to pull diagnostics")
+        .diagnostics;
+
+    assert_eq!(diagnostics.len(), 1, "Expected one diagnostic");
+
+    let diagnostic = &diagnostics[0];
+
+    assert_eq!(
+        diagnostic.category().map(|c| c.name()),
+        Some("lint/safety/banDropTable")
+    );
+
+    assert_eq!(
+        diagnostic.location().span,
+        Some(TextRange::new(106.into(), 136.into()))
+    );
+}
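The hard-coded `106..136` is the byte range of the `drop table` statement inside the fixture literal, so it silently depends on the string's indentation. A hedged sketch of deriving it from the fixture instead (an alternative, not what the test above does):

```rust
use pgt_text_size::TextRange;

// Sketch: derive the expected diagnostic span from the fixture text
// instead of hard-coding byte offsets (assumes the statement is unique).
fn expected_span(content: &str) -> TextRange {
    let needle = "drop table non_existing_table;";
    let start = content.find(needle).expect("statement present in fixture");
    TextRange::new(
        u32::try_from(start).unwrap().into(),
        u32::try_from(start + needle.len()).unwrap().into(),
    )
}
```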
diff --git a/crates/pgt_workspace/src/workspace/server/document.rs b/crates/pgt_workspace/src/workspace/server/document.rs
index 9d3700df..b5798370 100644
--- a/crates/pgt_workspace/src/workspace/server/document.rs
+++ b/crates/pgt_workspace/src/workspace/server/document.rs
@@ -1,5 +1,6 @@
 use std::sync::Arc;
 
+use pgt_analyser::AnalysableStatement;
 use pgt_diagnostics::{Diagnostic, DiagnosticExt, serde::Diagnostic as SDiagnostic};
 use pgt_query_ext::diagnostics::SyntaxDiagnostic;
 use pgt_suppressions::Suppressions;
@@ -208,8 +209,8 @@ impl<'a> StatementMapper<'a> for ExecuteStatementMapper {
     }
 }
 
-pub struct AsyncDiagnosticsMapper;
-impl<'a> StatementMapper<'a> for AsyncDiagnosticsMapper {
+pub struct TypecheckDiagnosticsMapper;
+impl<'a> StatementMapper<'a> for TypecheckDiagnosticsMapper {
     type Output = (
         StatementId,
         TextRange,
         Option<pgt_query_ext::NodeEnum>,
@@ -240,22 +241,20 @@ impl<'a> StatementMapper<'a> for AsyncDiagnosticsMapper {
     }
 }
 
-pub struct SyncDiagnosticsMapper;
-impl<'a> StatementMapper<'a> for SyncDiagnosticsMapper {
-    type Output = (
-        TextRange,
-        Option<pgt_query_ext::NodeEnum>,
-        Option<SyntaxDiagnostic>,
-    );
+pub struct AnalyserDiagnosticsMapper;
+impl<'a> StatementMapper<'a> for AnalyserDiagnosticsMapper {
+    type Output = (Option<AnalysableStatement>, Option<SyntaxDiagnostic>);
 
     fn map(&self, parser: &'a Document, id: StatementId, range: TextRange) -> Self::Output {
-        let ast_result = parser.ast_db.get_or_cache_ast(&id);
+        let maybe_node = parser.ast_db.get_or_cache_ast(&id);
 
-        let (ast_option, diagnostics) = match &*ast_result {
+        let (ast_option, diagnostics) = match &*maybe_node {
             Ok(node) => {
                 let plpgsql_result = parser.ast_db.get_or_cache_plpgsql_parse(&id);
                 if let Some(Err(diag)) = plpgsql_result {
-                    (Some(node.clone()), Some(diag.clone()))
+                    // offset the plpgsql diagnostic from the parent statement start
+                    let span = diag.location().span.map(|sp| sp + range.start());
+                    (Some(node.clone()), Some(diag.span(span.unwrap_or(range))))
                 } else {
                     (Some(node.clone()), None)
                 }
@@ -263,7 +262,10 @@ impl<'a> StatementMapper<'a> for SyncDiagnosticsMapper {
             Err(diag) => (None, Some(diag.clone())),
         };
 
-        (range, ast_option, diagnostics)
+        (
+            ast_option.map(|root| AnalysableStatement { range, root }),
+            diagnostics,
+        )
     }
 }
 
@@ -401,10 +403,10 @@ END;
 $$;";
 
         let d = Document::new(input.to_string(), 1);
 
-        let results = d.iter(SyncDiagnosticsMapper).collect::<Vec<_>>();
+        let results = d.iter(AnalyserDiagnosticsMapper).collect::<Vec<_>>();
         assert_eq!(results.len(), 1);
 
-        let (_range, ast, diagnostic) = &results[0];
+        let (ast, diagnostic) = &results[0];
 
         // Should have parsed the CREATE FUNCTION statement
         assert!(ast.is_some());
@@ -432,10 +434,10 @@ END;
 $$;";
 
         let d = Document::new(input.to_string(), 1);
 
-        let results = d.iter(SyncDiagnosticsMapper).collect::<Vec<_>>();
+        let results = d.iter(AnalyserDiagnosticsMapper).collect::<Vec<_>>();
         assert_eq!(results.len(), 1);
 
-        let (_range, ast, diagnostic) = &results[0];
+        let (ast, diagnostic) = &results[0];
 
         // Should have parsed the CREATE FUNCTION statement
         assert!(ast.is_some());
@@ -458,15 +460,15 @@ $$;";
 
         let d = Document::new(input.to_string(), 1);
 
-        let results1 = d.iter(SyncDiagnosticsMapper).collect::<Vec<_>>();
+        let results1 = d.iter(AnalyserDiagnosticsMapper).collect::<Vec<_>>();
         assert_eq!(results1.len(), 1);
-        assert!(results1[0].1.is_some());
-        assert!(results1[0].2.is_none());
+        assert!(results1[0].0.is_some());
+        assert!(results1[0].1.is_none());
 
-        let results2 = d.iter(SyncDiagnosticsMapper).collect::<Vec<_>>();
+        let results2 = d.iter(AnalyserDiagnosticsMapper).collect::<Vec<_>>();
         assert_eq!(results2.len(), 1);
-        assert!(results2[0].1.is_some());
-        assert!(results2[0].2.is_none());
+        assert!(results2[0].0.is_some());
+        assert!(results2[0].1.is_none());
     }
 
     #[test]
@@ -513,7 +515,7 @@ END;
 $$ LANGUAGE plpgsql;";
 
         let d = Document::new(input.to_string(), 1);
-        let results = d.iter(AsyncDiagnosticsMapper).collect::<Vec<_>>();
+        let results = d.iter(TypecheckDiagnosticsMapper).collect::<Vec<_>>();
         assert_eq!(results.len(), 1);
 
         let (_id, _range, ast, cst, sql_fn_sig) = &results[0];
@@ -532,7 +534,7 @@ $$ LANGUAGE plpgsql;";
             "CREATE FUNCTION add(a int, b int) RETURNS int AS 'SELECT $1 + $2;' LANGUAGE sql;";
 
         let d = Document::new(input.to_string(), 1);
-        let results = d.iter(AsyncDiagnosticsMapper).collect::<Vec<_>>();
+        let results = d.iter(TypecheckDiagnosticsMapper).collect::<Vec<_>>();
         assert_eq!(results.len(), 2);
 
         // Check the function body
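For context, `StatementMapper` is the small abstraction this refactor leans on: each consumer declares exactly the tuple it needs per statement. A toy mapper under the same trait shape, invented for illustration (it would sit alongside the others in `document.rs`, where `Document` and `StatementId` are in scope):

```rust
use pgt_text_size::TextRange;

// Toy mapper: yields only each statement's absolute range.
pub struct RangeOnlyMapper;

impl<'a> StatementMapper<'a> for RangeOnlyMapper {
    type Output = TextRange;

    fn map(&self, _parser: &'a Document, _id: StatementId, range: TextRange) -> Self::Output {
        range
    }
}
```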
diff --git a/docs/codegen/src/rules_docs.rs b/docs/codegen/src/rules_docs.rs
index 68db53db..1d4b86a9 100644
--- a/docs/codegen/src/rules_docs.rs
+++ b/docs/codegen/src/rules_docs.rs
@@ -1,7 +1,7 @@
 use anyhow::{Result, bail};
 use biome_string_case::Case;
 use pgt_analyse::{AnalyserOptions, AnalysisFilter, RuleFilter, RuleMetadata};
-use pgt_analyser::{Analyser, AnalyserConfig};
+use pgt_analyser::{AnalysableStatement, Analyser, AnalyserConfig};
 use pgt_console::StdDisplay;
 use pgt_diagnostics::{Diagnostic, DiagnosticExt, PrintDiagnostic};
 use pgt_query_ext::diagnostics::SyntaxDiagnostic;
@@ -443,10 +443,16 @@ fn print_diagnostics(
     // split and parse each statement
     let stmts = pgt_statement_splitter::split(code);
-    for stmt in stmts.ranges {
-        match pgt_query_ext::parse(&code[stmt]) {
+    for stmt_range in stmts.ranges {
+        match pgt_query_ext::parse(&code[stmt_range]) {
             Ok(ast) => {
-                for rule_diag in analyser.run(pgt_analyser::AnalyserContext { root: &ast }) {
+                for rule_diag in analyser.run(pgt_analyser::AnalyserParams {
+                    schema_cache: None,
+                    stmts: vec![AnalysableStatement {
+                        range: stmt_range,
+                        root: ast,
+                    }],
+                }) {
                     let diag = pgt_diagnostics::serde::Diagnostic::new(rule_diag);
 
                     let category = diag.category().expect("linter diagnostic has no code");
diff --git a/docs/rules/ban-drop-column.md b/docs/rules/ban-drop-column.md
index 0c46d40a..28b3a4b5 100644
--- a/docs/rules/ban-drop-column.md
+++ b/docs/rules/ban-drop-column.md
@@ -25,10 +25,14 @@ alter table test drop column id;
 ```
 
 ```sh
-code-block.sql lint/safety/banDropColumn ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+code-block.sql:1:1 lint/safety/banDropColumn ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
 
 ! Dropping a column may break existing clients.
 
+  > 1 │ alter table test drop column id;
+      │ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+    2 │
+
   i You can leave the column as nullable or delete the column once queries no longer select or modify the column.
diff --git a/docs/rules/ban-drop-not-null.md b/docs/rules/ban-drop-not-null.md
index b860c45c..56ec33c7 100644
--- a/docs/rules/ban-drop-not-null.md
+++ b/docs/rules/ban-drop-not-null.md
@@ -25,10 +25,14 @@ alter table users alter column email drop not null;
 ```
 
 ```sh
-code-block.sql lint/safety/banDropNotNull ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+code-block.sql:1:1 lint/safety/banDropNotNull ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
 
 ! Dropping a NOT NULL constraint may break existing clients.
 
+  > 1 │ alter table users alter column email drop not null;
+      │ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+    2 │
+
   i Consider using a marker value that represents NULL. Alternatively, create a new table allowing NULL values, copy the data from the old table, and create a view that filters NULL values.
diff --git a/docs/rules/ban-drop-table.md b/docs/rules/ban-drop-table.md
index 4b81755f..8aeb6e2c 100644
--- a/docs/rules/ban-drop-table.md
+++ b/docs/rules/ban-drop-table.md
@@ -26,10 +26,14 @@ drop table some_table;
 ```
 
 ```sh
-code-block.sql lint/safety/banDropTable ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+code-block.sql:1:1 lint/safety/banDropTable ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
 
 ! Dropping a table may break existing clients.
 
+  > 1 │ drop table some_table;
+      │ ^^^^^^^^^^^^^^^^^^^^^^
+    2 │
+
   i Update your application code to no longer read or write the table, and only then delete the table. Be sure to create a backup.
diff --git a/packages/@postgrestools/backend-jsonrpc/src/workspace.ts b/packages/@postgrestools/backend-jsonrpc/src/workspace.ts
index 72b77bc1..971f07ec 100644
--- a/packages/@postgrestools/backend-jsonrpc/src/workspace.ts
+++ b/packages/@postgrestools/backend-jsonrpc/src/workspace.ts
@@ -435,17 +435,10 @@ export interface OpenFileParams {
 	version: number;
 }
 export interface ChangeFileParams {
-	changes: ChangeParams[];
+	content: string;
 	path: PgTPath;
 	version: number;
 }
-export interface ChangeParams {
-	/**
-	 * The range of the file that changed. If `None`, the whole file changed.
-	 */
-	range?: TextRange;
-	text: string;
-}
 export interface CloseFileParams {
 	path: PgTPath;
 }
diff --git a/xtask/codegen/src/generate_new_analyser_rule.rs b/xtask/codegen/src/generate_new_analyser_rule.rs
index fc225712..343b0673 100644
--- a/xtask/codegen/src/generate_new_analyser_rule.rs
+++ b/xtask/codegen/src/generate_new_analyser_rule.rs
@@ -41,7 +41,7 @@ fn generate_rule_template(
     format!(
         r#"use pgt_analyse::{{
-    context::RuleContext, {macro_name}, Rule, RuleDiagnostic
+    context::RuleContext, {macro_name}, Rule, RuleDiagnostic,
 }};
 use pgt_console::markup;
 use pgt_diagnostics::Severity;
 
 {macro_name}! {{
     /// Succinct description of the rule.
@@ -78,7 +78,9 @@ use pgt_diagnostics::Severity;
 impl Rule for {rule_name_upper_camel} {{
     type Options = ();
 
     fn run(ctx: &RuleContext<Self>) -> Vec<RuleDiagnostic> {{
+        // `ctx.schema_cache()` is `Some` only when a database connection is available;
+        // `ctx.file_context()` carries state from previously analysed statements.
         Vec::new()
     }}
 }}
diff --git a/xtask/rules_check/src/lib.rs b/xtask/rules_check/src/lib.rs
index da4b4c73..0c57d06f 100644
--- a/xtask/rules_check/src/lib.rs
+++ b/xtask/rules_check/src/lib.rs
@@ -7,7 +7,7 @@ use pgt_analyse::{
     AnalyserOptions, AnalysisFilter, GroupCategory, RegistryVisitor, Rule, RuleCategory,
     RuleFilter, RuleGroup, RuleMetadata,
 };
-use pgt_analyser::{Analyser, AnalyserConfig};
+use pgt_analyser::{AnalysableStatement, Analyser, AnalyserConfig};
 use pgt_console::{markup, Console};
 use pgt_diagnostics::{Diagnostic, DiagnosticExt, PrintDiagnostic};
 use pgt_query_ext::diagnostics::SyntaxDiagnostic;
@@ -127,10 +127,16 @@ fn assert_lint(
     });
 
     let result = pgt_statement_splitter::split(code);
-    for stmt in result.ranges {
-        match pgt_query_ext::parse(&code[stmt]) {
+    for stmt_range in result.ranges {
+        match pgt_query_ext::parse(&code[stmt_range]) {
             Ok(ast) => {
-                for rule_diag in analyser.run(pgt_analyser::AnalyserContext { root: &ast }) {
+                for rule_diag in analyser.run(pgt_analyser::AnalyserParams {
+                    schema_cache: None,
+                    stmts: vec![AnalysableStatement {
+                        range: stmt_range,
+                        root: ast,
+                    }],
+                }) {
                     let diag = pgt_diagnostics::serde::Diagnostic::new(rule_diag);
 
                     let category = diag.category().expect("linter diagnostic has no code");