From 6991fa62314644183c5b846e140cb6b6d9d14063 Mon Sep 17 00:00:00 2001 From: Bilal Mahmoud Date: Sun, 8 Mar 2026 16:17:47 +0100 Subject: [PATCH 1/7] feat: checkpoint MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit feat: checkpoint (II) feat: checkpoint (III) feat: snapshot vec feat: add dedicated filter feat: checkpoint feat: filter implementation feat: filter implementation (mostly) done chore: environment capture note chore: always postgres bigint feat: target clone feat: simplify lookup feat: move storage up feat: eval entity path chore: checkpoint chore: checkpoint chore: find entrypoint feat: eval context feat: eval cleanup chore: cleanup feat: track index feat: wire up filter feat: add error reporting chore: checkpoint feat: add traverse, and first postgres compiler outline feat: traverse bitmap feat: move traversal out feat: projections feat: projections fix: clippy feat: subquery projection for lateral feat: checkpoint feat: test plan feat: checkpoint feat: checkpoint – failing tests ;-; feat: checkpoint – failing tests ;-; feat: checkpoint — passing tests fix: import fix: entity type feat: checkpoint feat: attribute a cost to terminator placement switches fix: import feat: checkpoint feat: checkpoint chore: lint --- libs/@local/hashql/compiletest/src/lib.rs | 1 + .../compiletest/src/suite/eval_postgres.rs | 206 ++++ .../suite/mir_pass_transform_pre_inline.rs | 4 + .../hashql/compiletest/src/suite/mod.rs | 4 +- libs/@local/hashql/eval/Cargo.toml | 13 +- libs/@local/hashql/eval/POSTGRES_TEST_PLAN.md | 605 ++++++++++ libs/@local/hashql/eval/src/context.rs | 119 ++ .../hashql/eval/src/postgres/continuation.rs | 145 +++ libs/@local/hashql/eval/src/postgres/error.rs | 310 +++++ .../hashql/eval/src/postgres/filter/mod.rs | 788 +++++++++++++ .../hashql/eval/src/postgres/filter/tests.rs | 1007 +++++++++++++++++ libs/@local/hashql/eval/src/postgres/mod.rs | 374 ++++++ .../hashql/eval/src/postgres/parameters.rs | 245 ++++ 
.../hashql/eval/src/postgres/projections.rs | 406 +++++++ .../hashql/eval/src/postgres/traverse.rs | 150 +++ .../hashql/eval/tests/ui/postgres/.spec.toml | 1 + .../postgres/arithmetic-addition-casts.jsonc | 17 + .../ui/postgres/comparison-no-cast.aux.mir | 76 ++ .../ui/postgres/comparison-no-cast.jsonc | 14 + .../ui/postgres/comparison-no-cast.stdout | 14 + .../ui/postgres/constant-true-filter.aux.mir | 34 + .../ui/postgres/constant-true-filter.jsonc | 11 + .../ui/postgres/constant-true-filter.stdout | 10 + .../ui/postgres/dict-construction.aux.mir | 80 ++ .../tests/ui/postgres/dict-construction.jsonc | 20 + .../ui/postgres/dict-construction.stdout | 14 + .../ui/postgres/entity-archived-check.aux.mir | 38 + .../ui/postgres/entity-archived-check.jsonc | 14 + .../ui/postgres/entity-archived-check.stdout | 14 + .../postgres/entity-draft-id-equality.aux.mir | 50 + .../postgres/entity-draft-id-equality.jsonc | 14 + .../postgres/entity-draft-id-equality.stdout | 10 + .../postgres/entity-type-ids-lateral.aux.mir | 50 + .../ui/postgres/entity-type-ids-lateral.jsonc | 14 + .../postgres/entity-type-ids-lateral.stdout | 20 + .../ui/postgres/entity-uuid-equality.aux.mir | 104 ++ .../ui/postgres/entity-uuid-equality.jsonc | 16 + .../ui/postgres/entity-uuid-equality.stdout | 13 + .../postgres/entity-web-id-equality.aux.mir | 50 + .../ui/postgres/entity-web-id-equality.jsonc | 14 + .../ui/postgres/entity-web-id-equality.stdout | 13 + .../ui/postgres/env-captured-variable.aux.mir | 36 + .../ui/postgres/env-captured-variable.jsonc | 27 + .../ui/postgres/env-captured-variable.stdout | 13 + .../filter/binary_bitand_bigint_cast.snap | 7 + .../filter/binary_sub_numeric_cast.snap | 7 + .../data_island_provides_without_lateral.snap | 37 + .../ui/postgres/filter/diamond_cfg_merge.snap | 7 + .../filter/dynamic_index_projection.snap | 7 + .../filter/field_by_name_projection.snap | 7 + .../filter/field_index_projection.snap | 7 + .../filter/island_exit_empty_arrays.snap | 7 + 
.../ui/postgres/filter/island_exit_goto.snap | 7 + .../filter/island_exit_switch_int.snap | 7 + .../filter/island_exit_with_live_out.snap | 7 + .../postgres/filter/left_entity_filter.snap | 7 + .../filter/nested_property_access.snap | 7 + .../filter/property_field_equality.snap | 7 + .../ui/postgres/filter/property_mask.snap | 39 + .../provides_drives_select_and_joins.snap | 37 + .../filter/straight_line_goto_chain.snap | 7 + .../filter/switch_int_many_branches.snap | 7 + .../ui/postgres/filter/unary_bitnot.snap | 7 + .../tests/ui/postgres/filter/unary_neg.snap | 7 + .../tests/ui/postgres/filter/unary_not.snap | 7 + .../ui/postgres/if-input-branches.aux.mir | 66 ++ .../tests/ui/postgres/if-input-branches.jsonc | 21 + .../ui/postgres/if-input-branches.stdout | 15 + .../postgres/input-parameter-exists.aux.mir | 80 ++ .../ui/postgres/input-parameter-exists.jsonc | 11 + .../ui/postgres/input-parameter-exists.stdout | 13 + .../ui/postgres/input-parameter-load.aux.mir | 50 + .../ui/postgres/input-parameter-load.jsonc | 14 + .../ui/postgres/input-parameter-load.stdout | 13 + .../ui/postgres/left-entity-filter.jsonc | 14 + .../postgres/let-binding-propagation.aux.mir | 50 + .../ui/postgres/let-binding-propagation.jsonc | 16 + .../postgres/let-binding-propagation.stdout | 13 + .../ui/postgres/list-construction.aux.mir | 66 ++ .../tests/ui/postgres/list-construction.jsonc | 20 + .../ui/postgres/list-construction.stdout | 14 + .../ui/postgres/logical-and-inputs.aux.mir | 80 ++ .../ui/postgres/logical-and-inputs.jsonc | 14 + .../ui/postgres/logical-and-inputs.stdout | 14 + .../tests/ui/postgres/logical-not-input.jsonc | 11 + .../minimal-select-no-extra-joins.aux.mir | 50 + .../minimal-select-no-extra-joins.jsonc | 14 + .../minimal-select-no-extra-joins.stdout | 13 + .../ui/postgres/mixed-sources-filter.aux.mir | 49 + .../ui/postgres/mixed-sources-filter.jsonc | 35 + .../ui/postgres/mixed-sources-filter.stdout | 17 + .../ui/postgres/multiple-filters.aux.mir | 75 ++ 
.../tests/ui/postgres/multiple-filters.jsonc | 22 + .../tests/ui/postgres/multiple-filters.stdout | 16 + .../postgres/nested-if-input-branches.aux.mir | 82 ++ .../postgres/nested-if-input-branches.jsonc | 28 + .../postgres/nested-if-input-branches.stdout | 17 + .../ui/postgres/nested-property-access.jsonc | 14 + .../ui/postgres/opaque-passthrough.aux.mir | 120 ++ .../ui/postgres/opaque-passthrough.jsonc | 16 + .../ui/postgres/opaque-passthrough.stdout | 13 + .../properties-triggers-editions-join.stderr | 10 + .../ui/postgres/property-field-equality.jsonc | 14 + .../ui/postgres/struct-construction.aux.mir | 80 ++ .../ui/postgres/struct-construction.jsonc | 20 + .../ui/postgres/struct-construction.stdout | 16 + .../ui/postgres/tuple-construction.aux.mir | 80 ++ .../ui/postgres/tuple-construction.jsonc | 20 + .../ui/postgres/tuple-construction.stdout | 14 + 109 files changed, 6841 insertions(+), 6 deletions(-) create mode 100644 libs/@local/hashql/compiletest/src/suite/eval_postgres.rs create mode 100644 libs/@local/hashql/eval/POSTGRES_TEST_PLAN.md create mode 100644 libs/@local/hashql/eval/src/context.rs create mode 100644 libs/@local/hashql/eval/src/postgres/continuation.rs create mode 100644 libs/@local/hashql/eval/src/postgres/error.rs create mode 100644 libs/@local/hashql/eval/src/postgres/filter/mod.rs create mode 100644 libs/@local/hashql/eval/src/postgres/filter/tests.rs create mode 100644 libs/@local/hashql/eval/src/postgres/mod.rs create mode 100644 libs/@local/hashql/eval/src/postgres/parameters.rs create mode 100644 libs/@local/hashql/eval/src/postgres/projections.rs create mode 100644 libs/@local/hashql/eval/src/postgres/traverse.rs create mode 100644 libs/@local/hashql/eval/tests/ui/postgres/.spec.toml create mode 100644 libs/@local/hashql/eval/tests/ui/postgres/arithmetic-addition-casts.jsonc create mode 100644 libs/@local/hashql/eval/tests/ui/postgres/comparison-no-cast.aux.mir create mode 100644 
libs/@local/hashql/eval/tests/ui/postgres/comparison-no-cast.jsonc create mode 100644 libs/@local/hashql/eval/tests/ui/postgres/comparison-no-cast.stdout create mode 100644 libs/@local/hashql/eval/tests/ui/postgres/constant-true-filter.aux.mir create mode 100644 libs/@local/hashql/eval/tests/ui/postgres/constant-true-filter.jsonc create mode 100644 libs/@local/hashql/eval/tests/ui/postgres/constant-true-filter.stdout create mode 100644 libs/@local/hashql/eval/tests/ui/postgres/dict-construction.aux.mir create mode 100644 libs/@local/hashql/eval/tests/ui/postgres/dict-construction.jsonc create mode 100644 libs/@local/hashql/eval/tests/ui/postgres/dict-construction.stdout create mode 100644 libs/@local/hashql/eval/tests/ui/postgres/entity-archived-check.aux.mir create mode 100644 libs/@local/hashql/eval/tests/ui/postgres/entity-archived-check.jsonc create mode 100644 libs/@local/hashql/eval/tests/ui/postgres/entity-archived-check.stdout create mode 100644 libs/@local/hashql/eval/tests/ui/postgres/entity-draft-id-equality.aux.mir create mode 100644 libs/@local/hashql/eval/tests/ui/postgres/entity-draft-id-equality.jsonc create mode 100644 libs/@local/hashql/eval/tests/ui/postgres/entity-draft-id-equality.stdout create mode 100644 libs/@local/hashql/eval/tests/ui/postgres/entity-type-ids-lateral.aux.mir create mode 100644 libs/@local/hashql/eval/tests/ui/postgres/entity-type-ids-lateral.jsonc create mode 100644 libs/@local/hashql/eval/tests/ui/postgres/entity-type-ids-lateral.stdout create mode 100644 libs/@local/hashql/eval/tests/ui/postgres/entity-uuid-equality.aux.mir create mode 100644 libs/@local/hashql/eval/tests/ui/postgres/entity-uuid-equality.jsonc create mode 100644 libs/@local/hashql/eval/tests/ui/postgres/entity-uuid-equality.stdout create mode 100644 libs/@local/hashql/eval/tests/ui/postgres/entity-web-id-equality.aux.mir create mode 100644 libs/@local/hashql/eval/tests/ui/postgres/entity-web-id-equality.jsonc create mode 100644 
libs/@local/hashql/eval/tests/ui/postgres/entity-web-id-equality.stdout create mode 100644 libs/@local/hashql/eval/tests/ui/postgres/env-captured-variable.aux.mir create mode 100644 libs/@local/hashql/eval/tests/ui/postgres/env-captured-variable.jsonc create mode 100644 libs/@local/hashql/eval/tests/ui/postgres/env-captured-variable.stdout create mode 100644 libs/@local/hashql/eval/tests/ui/postgres/filter/binary_bitand_bigint_cast.snap create mode 100644 libs/@local/hashql/eval/tests/ui/postgres/filter/binary_sub_numeric_cast.snap create mode 100644 libs/@local/hashql/eval/tests/ui/postgres/filter/data_island_provides_without_lateral.snap create mode 100644 libs/@local/hashql/eval/tests/ui/postgres/filter/diamond_cfg_merge.snap create mode 100644 libs/@local/hashql/eval/tests/ui/postgres/filter/dynamic_index_projection.snap create mode 100644 libs/@local/hashql/eval/tests/ui/postgres/filter/field_by_name_projection.snap create mode 100644 libs/@local/hashql/eval/tests/ui/postgres/filter/field_index_projection.snap create mode 100644 libs/@local/hashql/eval/tests/ui/postgres/filter/island_exit_empty_arrays.snap create mode 100644 libs/@local/hashql/eval/tests/ui/postgres/filter/island_exit_goto.snap create mode 100644 libs/@local/hashql/eval/tests/ui/postgres/filter/island_exit_switch_int.snap create mode 100644 libs/@local/hashql/eval/tests/ui/postgres/filter/island_exit_with_live_out.snap create mode 100644 libs/@local/hashql/eval/tests/ui/postgres/filter/left_entity_filter.snap create mode 100644 libs/@local/hashql/eval/tests/ui/postgres/filter/nested_property_access.snap create mode 100644 libs/@local/hashql/eval/tests/ui/postgres/filter/property_field_equality.snap create mode 100644 libs/@local/hashql/eval/tests/ui/postgres/filter/property_mask.snap create mode 100644 libs/@local/hashql/eval/tests/ui/postgres/filter/provides_drives_select_and_joins.snap create mode 100644 libs/@local/hashql/eval/tests/ui/postgres/filter/straight_line_goto_chain.snap create 
mode 100644 libs/@local/hashql/eval/tests/ui/postgres/filter/switch_int_many_branches.snap create mode 100644 libs/@local/hashql/eval/tests/ui/postgres/filter/unary_bitnot.snap create mode 100644 libs/@local/hashql/eval/tests/ui/postgres/filter/unary_neg.snap create mode 100644 libs/@local/hashql/eval/tests/ui/postgres/filter/unary_not.snap create mode 100644 libs/@local/hashql/eval/tests/ui/postgres/if-input-branches.aux.mir create mode 100644 libs/@local/hashql/eval/tests/ui/postgres/if-input-branches.jsonc create mode 100644 libs/@local/hashql/eval/tests/ui/postgres/if-input-branches.stdout create mode 100644 libs/@local/hashql/eval/tests/ui/postgres/input-parameter-exists.aux.mir create mode 100644 libs/@local/hashql/eval/tests/ui/postgres/input-parameter-exists.jsonc create mode 100644 libs/@local/hashql/eval/tests/ui/postgres/input-parameter-exists.stdout create mode 100644 libs/@local/hashql/eval/tests/ui/postgres/input-parameter-load.aux.mir create mode 100644 libs/@local/hashql/eval/tests/ui/postgres/input-parameter-load.jsonc create mode 100644 libs/@local/hashql/eval/tests/ui/postgres/input-parameter-load.stdout create mode 100644 libs/@local/hashql/eval/tests/ui/postgres/left-entity-filter.jsonc create mode 100644 libs/@local/hashql/eval/tests/ui/postgres/let-binding-propagation.aux.mir create mode 100644 libs/@local/hashql/eval/tests/ui/postgres/let-binding-propagation.jsonc create mode 100644 libs/@local/hashql/eval/tests/ui/postgres/let-binding-propagation.stdout create mode 100644 libs/@local/hashql/eval/tests/ui/postgres/list-construction.aux.mir create mode 100644 libs/@local/hashql/eval/tests/ui/postgres/list-construction.jsonc create mode 100644 libs/@local/hashql/eval/tests/ui/postgres/list-construction.stdout create mode 100644 libs/@local/hashql/eval/tests/ui/postgres/logical-and-inputs.aux.mir create mode 100644 libs/@local/hashql/eval/tests/ui/postgres/logical-and-inputs.jsonc create mode 100644 
libs/@local/hashql/eval/tests/ui/postgres/logical-and-inputs.stdout create mode 100644 libs/@local/hashql/eval/tests/ui/postgres/logical-not-input.jsonc create mode 100644 libs/@local/hashql/eval/tests/ui/postgres/minimal-select-no-extra-joins.aux.mir create mode 100644 libs/@local/hashql/eval/tests/ui/postgres/minimal-select-no-extra-joins.jsonc create mode 100644 libs/@local/hashql/eval/tests/ui/postgres/minimal-select-no-extra-joins.stdout create mode 100644 libs/@local/hashql/eval/tests/ui/postgres/mixed-sources-filter.aux.mir create mode 100644 libs/@local/hashql/eval/tests/ui/postgres/mixed-sources-filter.jsonc create mode 100644 libs/@local/hashql/eval/tests/ui/postgres/mixed-sources-filter.stdout create mode 100644 libs/@local/hashql/eval/tests/ui/postgres/multiple-filters.aux.mir create mode 100644 libs/@local/hashql/eval/tests/ui/postgres/multiple-filters.jsonc create mode 100644 libs/@local/hashql/eval/tests/ui/postgres/multiple-filters.stdout create mode 100644 libs/@local/hashql/eval/tests/ui/postgres/nested-if-input-branches.aux.mir create mode 100644 libs/@local/hashql/eval/tests/ui/postgres/nested-if-input-branches.jsonc create mode 100644 libs/@local/hashql/eval/tests/ui/postgres/nested-if-input-branches.stdout create mode 100644 libs/@local/hashql/eval/tests/ui/postgres/nested-property-access.jsonc create mode 100644 libs/@local/hashql/eval/tests/ui/postgres/opaque-passthrough.aux.mir create mode 100644 libs/@local/hashql/eval/tests/ui/postgres/opaque-passthrough.jsonc create mode 100644 libs/@local/hashql/eval/tests/ui/postgres/opaque-passthrough.stdout create mode 100644 libs/@local/hashql/eval/tests/ui/postgres/properties-triggers-editions-join.stderr create mode 100644 libs/@local/hashql/eval/tests/ui/postgres/property-field-equality.jsonc create mode 100644 libs/@local/hashql/eval/tests/ui/postgres/struct-construction.aux.mir create mode 100644 libs/@local/hashql/eval/tests/ui/postgres/struct-construction.jsonc create mode 100644 
libs/@local/hashql/eval/tests/ui/postgres/struct-construction.stdout create mode 100644 libs/@local/hashql/eval/tests/ui/postgres/tuple-construction.aux.mir create mode 100644 libs/@local/hashql/eval/tests/ui/postgres/tuple-construction.jsonc create mode 100644 libs/@local/hashql/eval/tests/ui/postgres/tuple-construction.stdout diff --git a/libs/@local/hashql/compiletest/src/lib.rs b/libs/@local/hashql/compiletest/src/lib.rs index 3aadcb739e6..d800d042cb8 100644 --- a/libs/@local/hashql/compiletest/src/lib.rs +++ b/libs/@local/hashql/compiletest/src/lib.rs @@ -7,6 +7,7 @@ if_let_guard, // Library Features + allocator_api, assert_matches, duration_millis_float, exitcode_exit_method, diff --git a/libs/@local/hashql/compiletest/src/suite/eval_postgres.rs b/libs/@local/hashql/compiletest/src/suite/eval_postgres.rs new file mode 100644 index 00000000000..496d237f8a0 --- /dev/null +++ b/libs/@local/hashql/compiletest/src/suite/eval_postgres.rs @@ -0,0 +1,206 @@ +use core::{alloc::Allocator, fmt::Write as _}; + +use hashql_ast::node::expr::Expr; +use hashql_core::{ + heap::ResetAllocator as _, + pretty::Formatter, + r#type::{TypeFormatter, TypeFormatterOptions, environment::Environment}, +}; +use hashql_diagnostics::DiagnosticIssues; +use hashql_eval::{context::EvalContext, postgres::PostgresCompiler}; +use hashql_mir::{ + body::{Body, basic_block::BasicBlockId, terminator::TerminatorKind}, + context::MirContext, + def::DefIdSlice, + intern::Interner, + pass::{ + GlobalAnalysisPass as _, + analysis::SizeEstimationAnalysis, + execution::{ExecutionAnalysis, ExecutionAnalysisResidual, TargetId}, + }, + pretty::{TextFormatAnnotations, TextFormatOptions}, +}; + +use super::{ + RunContext, Suite, SuiteDiagnostic, + common::{Header, process_issues}, + mir_pass_transform_post_inline::mir_pass_transform_post_inline, +}; + +/// Annotates each basic block with its execution target (Postgres / Interpreter / Embedding). 
+struct PlacementAnnotation<'ctx, A: Allocator> { + /// The residual for the body currently being formatted, if it has one. + current: Option<&'ctx ExecutionAnalysisResidual>, +} + +impl TextFormatAnnotations for PlacementAnnotation<'_, A> { + type BasicBlockAnnotation<'this, 'heap> + = &'static str + where + Self: 'this; + + fn annotate_basic_block<'heap>( + &self, + id: BasicBlockId, + _block: &hashql_mir::body::basic_block::BasicBlock<'heap>, + ) -> Option> { + let residual = self.current?; + let target = residual.assignment.get(id)?; + + Some(match *target { + TargetId::Interpreter => "interpreter", + TargetId::Postgres => "postgres", + TargetId::Embedding => "embedding", + }) + } +} + +pub(crate) struct EvalPostgres; + +impl Suite for EvalPostgres { + fn name(&self) -> &'static str { + "eval/postgres" + } + + fn description(&self) -> &'static str { + "PostgreSQL query compilation from MIR" + } + + fn secondary_file_extensions(&self) -> &[&str] { + &["mir"] + } + + fn run<'heap>( + &self, + RunContext { + heap, + diagnostics, + secondary_outputs, + .. 
+ }: RunContext<'_, 'heap>, + expr: Expr<'heap>, + ) -> Result { + let mut environment = Environment::new(heap); + let interner = Interner::new(heap); + + let (_, mut bodies, mut scratch) = mir_pass_transform_post_inline( + heap, + expr, + &interner, + (), + &mut environment, + diagnostics, + )?; + + let mut context = MirContext { + heap, + env: &environment, + interner: &interner, + diagnostics: DiagnosticIssues::new(), + }; + + let mut size_estimation_analysis = SizeEstimationAnalysis::new_in(&scratch); + size_estimation_analysis.run(&mut context, &bodies); + let footprints = size_estimation_analysis.finish(); + scratch.reset(); + + let analysis = ExecutionAnalysis { + footprints: &footprints, + scratch: &mut scratch, + }; + let analysis = analysis.run_all_in(&mut context, &mut bodies, heap); + scratch.reset(); + + process_issues(diagnostics, context.diagnostics)?; + + // Capture MIR after execution analysis with block placement annotations. + let mir_buf = format_mir_with_placement(heap, &environment, &bodies, &analysis); + secondary_outputs.insert("mir", mir_buf); + + let mut context = + EvalContext::new_in(&environment, &bodies, &analysis, context.heap, &mut scratch); + scratch.reset(); + + // Inside of **all** the bodies, find the `GraphRead` terminators to compile. 
+ let mut prepared_queries = Vec::new(); + let mut compiler = PostgresCompiler::new_in(&mut context, &mut scratch); + + for body in &bodies { + for block in &*body.basic_blocks { + if let TerminatorKind::GraphRead(read) = &block.terminator.kind { + let prepared_query = compiler.compile(read); + prepared_queries.push(prepared_query); + } + } + } + + scratch.reset(); + process_issues(diagnostics, context.diagnostics)?; + + let mut output = String::new(); + + for (index, query) in prepared_queries.iter().enumerate() { + if index > 0 { + let _ = writeln!(output); + } + + let sql = query.transpile().to_string(); + + let _ = writeln!(output, "{}\n\n{sql}", Header::new("SQL")); + + if !query.parameters.is_empty() { + let _ = writeln!( + output, + "\n{}\n\n{}", + Header::new("Parameters"), + query.parameters + ); + } + } + + Ok(output) + } +} + +fn format_mir_with_placement<'heap, A: Allocator>( + heap: &'heap hashql_core::heap::Heap, + env: &Environment<'heap>, + bodies: &DefIdSlice>, + analysis: &DefIdSlice>>, +) -> String { + let formatter = Formatter::new(heap); + let types = TypeFormatter::new( + &formatter, + env, + TypeFormatterOptions::terse().with_qualified_opaque_names(true), + ); + + let mut text_format = TextFormatOptions { + writer: Vec::::new(), + indent: 4, + sources: bodies, + types, + annotations: PlacementAnnotation { current: None }, + } + .build(); + + // Format each body, setting the current residual so the annotation can look up + // block targets for graph-read filter bodies. 
+ let mut first = true; + for body in bodies { + text_format.replace_annotations(PlacementAnnotation { + current: analysis.get(body.id).and_then(Option::as_ref), + }); + + if !first { + text_format.writer.extend_from_slice(b"\n\n"); + } + first = false; + + text_format + .format_body(body) + .expect("should be able to format body"); + } + + String::from_utf8_lossy_owned(text_format.writer) +} diff --git a/libs/@local/hashql/compiletest/src/suite/mir_pass_transform_pre_inline.rs b/libs/@local/hashql/compiletest/src/suite/mir_pass_transform_pre_inline.rs index 96bcaeca2ae..25c2c906b5e 100644 --- a/libs/@local/hashql/compiletest/src/suite/mir_pass_transform_pre_inline.rs +++ b/libs/@local/hashql/compiletest/src/suite/mir_pass_transform_pre_inline.rs @@ -41,6 +41,10 @@ pub(crate) trait MirRenderer { ); } +impl MirRenderer for () { + fn render<'heap>(&mut self, _: &mut RenderContext<'_, 'heap>, _: &DefIdSlice>) {} +} + impl MirRenderer for &mut R where R: MirRenderer, diff --git a/libs/@local/hashql/compiletest/src/suite/mod.rs b/libs/@local/hashql/compiletest/src/suite/mod.rs index f0e5deb6125..6326eeeeebb 100644 --- a/libs/@local/hashql/compiletest/src/suite/mod.rs +++ b/libs/@local/hashql/compiletest/src/suite/mod.rs @@ -10,6 +10,7 @@ mod ast_lowering_type_definition_extractor; mod ast_lowering_type_extractor; pub(crate) mod common; mod eval_graph_read_entity; +mod eval_postgres; mod hir_lower_alias_replacement; mod hir_lower_checking; mod hir_lower_ctor; @@ -49,7 +50,7 @@ use self::{ ast_lowering_special_form_expander::AstLoweringSpecialFormExpanderSuite, ast_lowering_type_definition_extractor::AstLoweringTypeDefinitionExtractorSuite, ast_lowering_type_extractor::AstLoweringTypeExtractorSuite, - eval_graph_read_entity::EvalGraphReadEntitySuite, + eval_graph_read_entity::EvalGraphReadEntitySuite, eval_postgres::EvalPostgres, hir_lower_alias_replacement::HirLowerAliasReplacementSuite, hir_lower_checking::HirLowerTypeCheckingSuite, hir_lower_ctor::HirLowerCtorSuite, 
hir_lower_graph_hoisting::HirLowerGraphHoistingSuite, @@ -151,6 +152,7 @@ const SUITES: &[&dyn Suite] = &[ &AstLoweringTypeDefinitionExtractorSuite, &AstLoweringTypeExtractorSuite, &EvalGraphReadEntitySuite, + &EvalPostgres, &HirLowerAliasReplacementSuite, &HirLowerCtorSuite, &HirLowerGraphHoistingSuite, diff --git a/libs/@local/hashql/eval/Cargo.toml b/libs/@local/hashql/eval/Cargo.toml index c313273ff11..bea6be986f8 100644 --- a/libs/@local/hashql/eval/Cargo.toml +++ b/libs/@local/hashql/eval/Cargo.toml @@ -8,14 +8,16 @@ version.workspace = true [dependencies] # Public workspace dependencies -hash-graph-store = { workspace = true, optional = true, public = true } -hashql-diagnostics = { workspace = true, public = true } -hashql-hir = { workspace = true, optional = true, public = true } +hash-graph-postgres-store = { workspace = true, public = true } +hash-graph-store = { workspace = true, optional = true, public = true } +hashql-diagnostics = { workspace = true, public = true } +hashql-hir = { workspace = true, public = true } +hashql-mir = { workspace = true, public = true } # Public third-party dependencies # Private workspace dependencies -hashql-core = { workspace = true, optional = true } +hashql-core = { workspace = true } type-system = { workspace = true, optional = true } # Private third-party dependencies @@ -24,9 +26,10 @@ simple-mermaid = { workspace = true } [dev-dependencies] hashql-compiletest = { workspace = true } +insta = { workspace = true } [features] -graph = ["dep:hash-graph-store", "dep:hashql-core", "dep:hashql-hir", "dep:type-system"] +graph = ["dep:hash-graph-store", "dep:type-system"] [lints] workspace = true diff --git a/libs/@local/hashql/eval/POSTGRES_TEST_PLAN.md b/libs/@local/hashql/eval/POSTGRES_TEST_PLAN.md new file mode 100644 index 00000000000..d86ba5de79f --- /dev/null +++ b/libs/@local/hashql/eval/POSTGRES_TEST_PLAN.md @@ -0,0 +1,605 @@ +# PostgreSQL Evaluator Test Plan + +Comprehensive test plan for 
`libs/@local/hashql/eval/src/postgres/` — the CFG-to-SQL lowering +module that compiles MIR islands into PostgreSQL `SELECT` statements. + +**Status:** Tier 1 compiletest coverage complete (21 tests); remaining Tier 1 tests blocked on +missing HIR features. Tier 3 MIR builder snapshot tests mostly complete (15/17 tests). Tier 2 +unit tests not yet started. + +**Legend:** ✅ done · ⏭ skipped (reason noted) · 📸 needs snapshot test · ❌ not started + +--- + +## Tier 1: Compiletest Suite — `eval/postgres/entity` + +End-to-end tests: J-Expr → AST → HIR → MIR → execution analysis → `PostgresCompiler::compile()` +→ transpiled SQL string. + +### Suite implementation + +New file: `compiletest/src/suite/eval_postgres_entity.rs` + +Pipeline: + +1. Reuse `mir_reify::mir_reify()` to get `(root_def, bodies)` +2. Run the standard MIR optimization pipeline (reuse whatever shared pipeline helper exists, + or centralize one — do NOT manually list individual passes, as that will drift) +3. Run `ExecutionAnalysis` on graph read filter bodies → `IslandGraph` +4. Build `EvalContext::new_in()` (computes `LiveOut` automatically) +5. Walk root body to find `GraphRead` terminators +6. Call `PostgresCompiler::compile(graph_read)` +7. Output `statement.transpile_to_string()` + parameter summary + +Test location: `eval/tests/ui/postgres/entity/` +Spec: `suite = "eval/postgres/entity"` + +Output format (in `.stdout`): + +``` +════ SQL ════════════════════════════════════════════════════════════════════════ + +SELECT ... +FROM ... +WHERE ... + +════ Parameters ════════════════════════════════════════════════════════════════ + +$1: TemporalAxis(Transaction) +$2: TemporalAxis(Decision) +$3: Symbol("entity_uuid") +... +``` + +**Harness note — parameter summary:** `Parameters`' internal `reverse` mapping uses a private +enum. 
To output the parameter summary, either add a `pub fn debug_summary(&self) -> String` +behind a `#[cfg(test)]` or a dedicated feature gate, or reduce output to just parameter count +and indices. Decide during implementation. + +**Snapshot stability:** avoid asserting specific `DefId`/`IslandId` numbers in continuation +aliases (e.g. `continuation_0_0`). These depend on upstream lowering/inline decisions. Assert +patterns instead: aliases start with `continuation_`, SELECT includes `*_block`, `*_locals`, +`*_values` columns. + +### Tests — basic control flow + +Note: constant expressions (`if true then ...`) are folded away by the MIR optimization +pipeline (inst_simplify, forward_substitution, DCE, CFG simplification) before reaching the +postgres compiler. To test control flow, use `[input ...]` parameters — they're opaque to +the optimizer and force runtime branching. + +#### `constant-true-filter` ✅ + +- **run:** pass +- **input:** filter body that returns literal `true` (after optimization, the entire filter + body is a single-block `return true`) +- **tests:** simplest possible compilation — no branching, straight-line `Return` continuation, + temporal conditions on base table, continuation LATERAL with OFFSET 0 +- **verifies:** baseline SQL shape, continuation composite structure `(filter, NULL, NULL, NULL)::continuation` + +#### `if-input-branches` ✅ + +- **run:** pass +- **input:** `if [input foo] then else ` where `foo` is a boolean input + parameter and both branches are distinct runtime expressions +- **tests:** `SwitchInt` → `CASE WHEN discriminant = THEN ... ELSE ... 
END` structure, + both branches produce continuations, input parameter appears as `$N` +- **verifies:** CASE tree generation, branch ordering matches `SwitchTargets`, input parameter + compilation + +#### `nested-if-input-branches` ✅ + +- **run:** pass +- **input:** nested `if` with input discriminants: + `if [input foo] then (if [input bar] then else ) else ` +- **tests:** nested `CASE WHEN` expressions — inner CASE as result of outer CASE branch +- **verifies:** stack-based compilation produces correctly nested SQL, snapshot/rollback of + locals across branches + +### Tests — entity path access & comparisons + +#### `entity-uuid-equality` ✅ + +- **run:** pass +- **input:** `vertex.id.entity_id.entity_uuid == ` +- **tests:** entity path resolution for `EntityUuid`, equality comparison (no cast), primitive + parameter for the UUID string +- **verifies:** `EntityPath::EntityUuid` → correct column reference on `entity_temporal_metadata`, + comparison operators don't add unnecessary casts + +#### `entity-web-id-equality` ✅ + +- **run:** pass +- **input:** `vertex.id.entity_id.web_id == ` +- **tests:** entity path resolution for `WebId` +- **verifies:** `EntityPath::WebId` → correct column reference + +#### `entity-draft-id-equality` ✅ + +- **run:** pass +- **input:** `vertex.id.draft_id == ` +- **tests:** entity path resolution for `DraftId` +- **verifies:** `EntityPath::DraftId` → correct column reference +- **note:** execution analysis places the equality comparison in the interpreter, not in SQL. + The test still exercises DraftId path resolution via the provides set (column appears in + SELECT), but the `==` itself is an island exit, not an in-SQL comparison. 
+ +#### `entity-archived-check` ✅ + +- **run:** pass +- **input:** filter on `vertex.metadata.archived == false` +- **tests:** entity path resolution for `Archived`, which requires `entity_editions` join +- **verifies:** `EntityPath::Archived` triggers `entity_editions` JOIN in FROM clause +- **note:** optimizer simplifies `archived == false` to `NOT(archived)`, which is fine — still + exercises the editions join and correct column reference. + +### Tests — property access + +#### `property-field-equality` ⏭ 📸 + +- **run:** skip +- **skip reason:** property subscript requires a concrete entity type; the type system cannot + resolve unknown field access on `vertex.properties` yet +- **input:** `vertex.properties. == "value"` +- **tests:** `EntityPath::Properties` → `entity_editions` join, property field access generates + `json_extract_path(properties, $key::text)`, equality comparison on jsonb +- **verifies:** json_extract_path chain, property access triggers entity_editions join, + parameter for field name symbol +- **covered by:** Tier 3 `property_field_equality` snapshot test + +#### `nested-property-access` ⏭ 📸 + +- **run:** skip +- **skip reason:** property subscript requires a concrete entity type (same as + `property-field-equality`) +- **input:** `vertex.properties.. 
== "value"` +- **tests:** multi-level property access → `json_extract_path(properties, $key1::text, $key2::text)` +- **verifies:** projection chain accumulates all indices into single json_extract_path call +- **covered by:** Tier 3 `nested_property_access` snapshot test + +### Tests — arithmetic & type casts + +The code casts differently depending on operator category: + +- **Arithmetic** (`Add`, `Sub`): both operands cast to `::numeric` +- **Bitwise** (`BitAnd`, `BitOr`): both operands cast to `::bigint` +- **Comparison** (`Eq`, `Ne`, `Lt`, `Lte`, `Gt`, `Gte`): **no cast** — operates on jsonb directly + +#### `comparison-no-cast` ✅ + +- **run:** pass +- **input:** `[input x] > [input y]` (input parameters, not properties — HIR lacks arithmetic + intrinsics so property access isn't needed; inputs exercise the same cast logic) +- **tests:** `BinOp::Gt` → `BinaryOperator::Greater` with **no** type casts on either operand +- **verifies:** comparison operators do not add unnecessary casts + +#### `arithmetic-addition-casts` ⏭ 📸 + +- **run:** skip +- **skip reason:** `::core::math::add` intrinsic not supported in HIR specialization yet + (H-4728) +- **input:** `[input x] + [input y] > 0` +- **tests:** `BinOp::Add` → `BinaryOperator::Add` with `::numeric` casts on both operands, + the result then compared with `>` (which itself does NOT cast) +- **verifies:** arithmetic ops cast to numeric, comparison on arithmetic result works, + correct nesting of cast vs. non-cast expressions +- **note:** addition exists in MIR (`bin.+`) but cannot be produced from J-Expr. Needs a + Tier 3 MIR builder snapshot test to exercise this code path. + +### Tests — boolean logic + +All boolean logic tests use input parameters as operands to prevent constant folding. + +Note: `logical-and-inputs` primarily verifies that `&&` desugaring survives optimization and +produces a CASE in SQL. If `if-input-branches` already covers CASE generation sufficiently, +one of these two can be dropped. 
Keep at least one AND/OR test as a smoke test for the +desugaring → SQL path. + +#### `logical-and-inputs` ✅ + +- **run:** pass +- **input:** `[input a] && [input b]` (two input parameters combined with AND) +- **tests:** `&&` desugars to `if a then b else false` — since `a` is a runtime input, the + branch survives optimization and produces a `CASE WHEN` in SQL +- **verifies:** short-circuit AND compiles to correct CASE structure +- **note:** execution analysis places both branches as island exits (the `b` load and the + literal `false` both transfer control to the interpreter). The test exercises island exit + from a SwitchInt, but does not produce a CASE tree with filter return branches. The + discriminant (`input a`) is evaluated in Postgres. + +#### `logical-not-input` ⏭ 📸 + +- **run:** skip +- **skip reason:** `::core::bool::not` intrinsic not supported in HIR specialization yet + (H-4729) +- **input:** `! [input a]` (negation of input parameter) +- **tests:** `UnOp::Not` → `UnaryOperator::Not` applied to an input parameter `$N` +- **verifies:** unary NOT in SQL output +- **note:** unary NOT exists in MIR (`un.!`) but cannot be produced from J-Expr. Needs a + Tier 3 MIR builder snapshot test to exercise this code path. 
+ +### Tests — input parameters & environment + +#### `input-parameter-load` ✅ + +- **run:** pass +- **input:** filter comparing entity field to a user-provided input parameter +- **tests:** `RValue::Input(InputOp::Load)` → parameter `$N`, parameter deduplication + (same input referenced twice → same `$N`) +- **verifies:** input parameters allocated correctly, dedup works + +#### `input-parameter-exists` ✅ + +- **run:** pass +- **input:** optional input parameter with default: `[input "optional_flag" Boolean true]` +- **tests:** optional input desugars to `if exists(flag) then load(flag) else default` +- **verifies:** `InputOp::Exists` appears in MIR; execution analysis places the SwitchInt + and both branches in the interpreter via island exit +- **note:** the `InputOp::Exists` → `NOT IS NULL` SQL pattern is NOT exercised because the + exists check is placed in the interpreter, not in the Postgres island. The test verifies + optional input desugaring + island exit behavior. `NOT IS NULL` needs a Tier 3 test. + +#### `env-captured-variable` ✅ + +- **run:** pass +- **input:** filter referencing an input from outer scope (captured in closure environment); + wrapped in `if true then ... 
else ...` to prevent thunk conversion + subsequent inlining + from eliminating the env access +- **tests:** environment field access → `env.` → parameter `$N` via `db.parameters.env()` +- **verifies:** captured variables become `Env(#N)` parameters, field projection on env works + +### Tests — aggregate construction + +#### `struct-construction` ✅ + +- **run:** pass +- **input:** filter that constructs a struct value (may appear in intermediate computations) +- **tests:** `AggregateKind::Struct` → `jsonb_build_object(key1, val1, key2, val2)` +- **verifies:** struct field names become symbol parameters, values are compiled operands + +#### `tuple-construction` ✅ + +- **run:** pass +- **input:** filter that constructs a tuple value +- **tests:** `AggregateKind::Tuple` → `jsonb_build_array(val1, val2, ...)` +- **verifies:** tuple elements become jsonb_build_array arguments + +#### `list-construction` ✅ + +- **run:** pass +- **input:** filter that constructs a list value +- **tests:** `AggregateKind::List` → `jsonb_build_array(val1, val2, ...)` +- **verifies:** list and tuple use the same lowering (`jsonb_build_array`) but are distinct + code paths — ensures the `List` match arm works + +#### `dict-construction` ✅ + +- **run:** pass +- **input:** filter that constructs a dict/map value +- **tests:** `AggregateKind::Dict` → `jsonb_build_object(k1, v1, k2, v2)` with operands + consumed in pairs via `array_chunks()` +- **verifies:** key-value pairing logic (the `operands.len() % 2 == 0` invariant and the + chunked iteration) + +#### `opaque-passthrough` ✅ + +- **run:** pass +- **input:** filter involving an opaque type wrapper (e.g. 
`EntityUuid(Uuid(...))`) +- **tests:** `AggregateKind::Opaque` → passes through the single inner operand unchanged +- **verifies:** opaque wrapper is transparent in SQL — no wrapping function, just the inner + expression + +#### `let-binding-propagation` ✅ + +- **run:** pass +- **input:** filter with let-bindings referencing input parameters, used in the filter condition + (e.g. `let x = [input foo] in vertex.properties.field == x`) +- **tests:** locals map tracks intermediate values correctly through compilation — the let-bound + local holds an input parameter expression, which is then used in a comparison +- **verifies:** let-bound values propagate correctly through the local → expression mapping, + input parameter deduplication still works across let-bindings + +### Tests — relationship / edge entity fields + +#### `left-entity-filter` ⏭ 📸 + +- **run:** skip +- **skip reason:** `link_data` is `Option`; accessing fields through Option requires + unwrap/pattern-match not yet expressible in filter J-Expr +- **input:** filter on `vertex.link_data.left_entity_id.entity_uuid == [input id]` +- **tests:** `EntityPath::LeftEntityUuid` → LEFT OUTER JOIN on `entity_has_left_entity`, + correct column reference +- **verifies:** edge/relationship fields trigger the correct join type (LEFT OUTER, not INNER) +- **covered by:** Tier 3 `left_entity_filter` snapshot test + +### Tests — multi-source "kitchen sink" + +#### `mixed-sources-filter` ✅ + +- **run:** pass +- **input:** two-filter pipeline: first checks `vertex.metadata.archived == false` (editions + join + primitive), second checks `vertex.entity_uuid == env_uuid` (env capture via + `if true then ...` anti-inlining wrapper) +- **tests:** exercises multiple parameter categories simultaneously: `TemporalAxis` (always + present), `Env` (captured variable); also exercises entity_editions join (archived), + two continuation laterals, two WHERE conditions +- **verifies:** parameter categories coexist, multiple join types in one 
query, correct + WHERE composition (temporal + continuation filters) + +### Tests — join planning (provides-driven SELECT) + +#### `minimal-select-no-extra-joins` ✅ + +- **run:** pass +- **input:** filter that only accesses temporal metadata fields (web_id, entity_uuid) +- **tests:** only base table (`entity_temporal_metadata`) in FROM, no unnecessary joins +- **verifies:** lazy join planning: unused tables are not joined + +#### `properties-triggers-editions-join` ⏭ 📸 + +- **run:** skip +- **skip reason:** `vertex.properties` is generic `T` and requires a concrete entity type; + same fundamental limitation as `property-field-equality`. The editions join IS tested + indirectly by `entity-archived-check` which accesses `EntityPath::Archived`. +- **input:** filter accessing `vertex.properties` +- **tests:** `entity_editions` JOIN appears in FROM clause +- **verifies:** `EntityPath::Properties` correctly triggers the editions join +- **covered by:** Tier 3 `property_field_equality` snapshot test (editions join visible in + the `entity_editions_0_0_1` table alias) + +#### `entity-type-ids-lateral` ✅ + +- **run:** pass +- **input:** query that requires entity type IDs in output +- **tests:** LEFT JOIN LATERAL subquery for `entity_is_of_type_ids` with unnest + jsonb_agg +- **verifies:** computed column via lateral join, correct correlation condition on edition_id + +### Tests — query structure + +Note: temporal conditions (`&&` overlap on transaction_time and decision_time) are unconditional +— they appear in every compiled query. Rather than a standalone test, verify their presence in +the baseline `constant-true-filter` snapshot and spot-check in others. + +#### `property-mask` ❌ 📸 + +- **run:** pass (requires suite directive to inject mask, since mask comes from permission + system, not the query) +- **input:** query where `Properties` and `PropertyMetadata` are in the provides set (i.e. 
+ they appear in SELECT because the interpreter needs them back, not just because the filter + references them) +- **suite directive:** `property_mask = true` (or similar) to inject a mask expression +- **tests:** `properties` and `property_metadata` SELECT expressions wrapped as `(col - mask)`, + other JSON expressions (e.g. `RecordId`'s `jsonb_build_object`) are NOT masked +- **verifies:** property mask applies only to property columns in the SELECT list, not to + filter-internal property references +- **covered by:** Tier 3 `property_mask` snapshot test (calls `with_property_mask()` directly) + +#### `multiple-filters` ✅ + +- **run:** pass +- **input:** graph read with two separate filter bodies +- **tests:** two CROSS JOIN LATERAL subqueries in FROM, each with OFFSET 0, two continuation + aliases (names start with `continuation_`), WHERE includes both `IS NOT FALSE` conditions, + SELECT decomposes both continuations with `*_block`, `*_locals`, `*_values` columns +- **verifies:** multi-filter compilation, correct SELECT column decomposition for both; + assert alias patterns rather than specific numeric ids + +### Non-goal: error diagnostics via compiletest + +All postgres diagnostics are `Severity::Bug` — they represent internal invariant violations +(closures, nested graph reads, function pointers, etc. placed into a Postgres island). The +placement pass would never produce these MIR shapes, so there is no valid J-Expr input that +triggers them through the full pipeline. These are tested as unit tests in Tier 2 (`error.rs`) +and optionally via hand-crafted MIR in Tier 3. + +--- + +## Tier 2: Unit Tests + +Standard `#[cfg(test)] mod tests` in the source files. Tests that would be tautological +(restating match arms, asserting structural constants) were dropped. 
+ +### `parameters.rs` — deduplication ✅ (6 tests) + +Tests parameter deduplication and category isolation: same input → same index, different +inputs → different indices, cross-category isolation (`Input("x")` vs `Symbol("x")`), +temporal axis stability, and env field dedup. + +### `continuation.rs` — naming ✅ (2 tests) + +Tests continuation alias naming and field identifier construction. + +### `traverse.rs`, `projections.rs`, `error.rs` ⏭ + +Dropped — the entity path → SQL column mapping, lazy join planning, and diagnostic +constructors are exercised transitively through the Tier 1 compiletest suite and Tier 3 +snapshot tests with sufficient coverage. Standalone unit tests for these would either restate +match arms (traverse), assert structural constants (continuation column names), or test +unreachable error paths (diagnostics). + +--- + +## Tier 3: MIR Builder Snapshot Tests + +Programmatic MIR via `body!` macro, compiled through the real execution analysis pipeline. +These test MIR constructs that exist in the compiler but cannot yet be produced from J-Expr — +either because the HIR specialization phase doesn't support the intrinsic (e.g. arithmetic, +unary NOT) or because the type system can't resolve the access yet (e.g. property field +subscripts on generic entity types). + +Test location: `eval/src/postgres/filter/tests.rs` +Snapshots: `eval/tests/ui/postgres/filter/` + +### Shared test harness + +A `Fixture` struct that: + +1. Takes a `body!`-constructed MIR body with `Source::GraphReadFilter` +2. Runs `SizeEstimationAnalysis` + `ExecutionAnalysis::run_all_in` (the public API) to + compute island boundaries via the real solver +3. 
Stores bodies and execution residuals for compilation + +Two compile helpers: + +- `compile_filter_islands()` — compiles each Postgres exec island via + `GraphReadFilterCompiler::compile_body()`, returns per-island SQL expressions +- `compile_full_query()` — synthesizes a `GraphRead` and calls + `PostgresCompiler::compile()`, returns full SELECT + parameters + +**Island boundary control:** the solver decides placement based on cost. To force a +Postgres→Interpreter boundary, bb0 must accumulate enough transfer cost to exceed the P→I +switch cost (8). Use heavy entity path loads (properties, composites like RecordId, +TemporalVersioning) in bb0, and an `apply` in bb1 to force Interpreter. Lightweight paths +(single UUIDs) are insufficient because block splitting fragments the body. + +### Tests — data islands & provides integration + +#### `data_island_provides_without_lateral` ✅ + +- **body:** island graph where a non-Postgres island requires Postgres-origin traversal paths, + causing the resolver to insert a Postgres `IslandKind::Data` island +- **tests:** the data island contributes output columns to `provides` (so they appear in the + SELECT list with correct joins) but does NOT generate a continuation LATERAL subquery +- **verifies:** `compile_graph_read_filter_island()` returns `None` for data islands, + `provides.insert(island.provides())` still runs, no spurious CROSS JOIN LATERAL + +#### `provides_drives_select_and_joins` ✅ + +- **body:** entity path loads (EntityUuid, Archived) in bb0, apply in bb1 forces Interpreter; + uses `compile_full_query()` to exercise the full `PostgresCompiler::compile()` path +- **tests:** SELECT list includes provided paths with correct joins, continuation LATERAL + appears, parameter summary shows temporal axes and symbols +- **verifies:** end-to-end provides → traverse → projections → build_from integration + +### Tests — control flow edge cases + +#### `island_exit_goto` ✅ + +- **body:** heavy entity path loads (properties, 
composites) in bb0, apply in bb1 +- **tests:** `Goto` crossing island boundary → `Continuation::IslandExit` +- **verifies:** continuation has correct `block` id, `locals` array, `values` array, all + cast to `::continuation` + +#### `island_exit_with_live_out` ✅ + +- **body:** heavy entity path loads + input in bb0, apply in bb1; input is live-out +- **tests:** island exit captures both block parameters AND remaining live-out locals +- **verifies:** `locals` array contains block param ids first, then live-out local ids; + `values` array has corresponding expressions in same order + +#### `island_exit_switch_int` ✅ + +- **body:** heavy entity path loads + SwitchInt in bb0; bb1 returns, bb2 has apply +- **tests:** one CASE branch produces a `Return` continuation, the other produces an + `IslandExit` continuation +- **verifies:** mixed continuation types within a single CASE tree — one branch has + `(filter, NULL, NULL, NULL)`, the other has `(NULL, block, locals[], values[])` + +#### `diamond_cfg_merge` ✅ + +- **body:** bb0 branches (SwitchInt on input) to bb1 and bb2, both goto bb3 which returns; + all blocks in Postgres island +- **tests:** diamond CFG entirely within one island — both branches converge +- **verifies:** CASE with two branches, locals snapshot/rollback works correctly across + the diamond (bb1's local changes don't leak into bb2's compilation) + +#### `switch_int_many_branches` ✅ + +- **body:** SwitchInt on input with 4 value targets + otherwise +- **tests:** multi-way branch → CASE with 4 WHEN clauses + ELSE +- **verifies:** correct number of WHEN clauses in correct order, otherwise maps to ELSE + +#### `straight_line_goto_chain` ✅ + +- **body:** bb0 → bb1 → bb2 → return, all within Postgres island, with block parameters + passed at each goto via inputs +- **tests:** goto fast-path (no snapshot/rollback needed for linear chains), block parameter + assignment at each step +- **verifies:** gotos within island are followed directly without CASE, 
locals accumulate + correctly through the chain + +#### `island_exit_empty_arrays` ✅ + +- **body:** heavy entity path loads in bb0, apply in bb1; no locals from bb0 used by bb1 +- **tests:** continuation with empty `locals` and `values` arrays +- **verifies:** `ARRAY[]::int[]` and `ARRAY[]::jsonb[]` transpile correctly (edge case for + empty array literals with type cast) + +### Tests — projection kinds + +#### `field_index_projection` ✅ + +- **body:** tuple aggregate followed by `.0` numeric field projection +- **tests:** `ProjectionKind::Field(FieldIndex)` → `json_extract_path(base, (0)::text)` +- **verifies:** numeric field indices are cast to `::text` for json_extract_path + +#### `dynamic_index_projection` ✅ + +- **body:** list with Index projection where the key comes from an input (uses fluent builder + since `body!` doesn't support `ProjectionKind::Index`) +- **tests:** `ProjectionKind::Index(local)` → `json_extract_path(base, (local_expr)::text)` +- **verifies:** dynamic index expression is grouped and cast to `::text`, not confused with + static field names + +#### `field_by_name_projection` ✅ + +- **body:** struct field access using `ProjectionKind::FieldByName(symbol)` +- **tests:** symbol allocated as parameter, cast to `::text` for json_extract_path +- **verifies:** named field access uses `db.parameters.symbol()` and correct text cast + +### Tests — operator coverage + +These ensure all operator branches produce correct SQL with correct casts. + +**Priority:** these are the primary path for testing arithmetic and unary operators, since the +HIR specialization phase does not yet support `::core::math::*` (H-4728) or +`::core::bool::not` (H-4729) intrinsics. The Tier 1 compiletest tests for these operators are +skipped until the HIR catches up. 
+ +#### `unary_neg` ✅ + +- **body:** `UnOp::Neg` applied to an input local +- **tests:** `UnaryOperator::Negate` in SQL output +- **verifies:** negation operator emits correctly + +#### `unary_not` ✅ + +- **body:** `UnOp::Not` applied to an input local +- **tests:** `UnaryOperator::Not` in SQL output +- **verifies:** logical NOT emits correctly + +#### `unary_bitnot` ✅ + +- **body:** `UnOp::BitNot` applied to a local +- **tests:** `UnaryOperator::BitwiseNot` in SQL output +- **verifies:** bitwise NOT emits correctly + +#### `binary_sub_numeric_cast` ✅ + +- **body:** `BinOp::Sub` on two input locals +- **tests:** `BinaryOperator::Subtract` with `::numeric` casts on both operands +- **verifies:** subtraction uses same cast logic as addition + +#### `binary_bitand_bigint_cast` ✅ + +- **body:** `BinOp::BitAnd` on two input locals +- **tests:** `BinaryOperator::BitwiseAnd` with `::bigint` casts on both operands +- **verifies:** bitwise ops use `bigint` cast (not `numeric`) + +### Tests — error diagnostics ⏭ + +All postgres diagnostics are `Severity::Bug` — internal invariant violations (closures, nested +graph reads, function pointers, etc. placed into a Postgres island). The public API +(`ExecutionAnalysis`) prevents invalid MIR from reaching the compiler: the placement solver +never assigns `Apply`, `Closure`, `FnPtr`, `GraphRead`, or projected assignments to Postgres +islands. These code paths are unreachable by construction, so testing them would require +bypassing the public API to hand-construct invalid island contents — which tests the test +harness, not the compiler. + +--- + +## Remaining Work + +1. **Tier 1 blocked on HIR:** `arithmetic-addition-casts` (H-4728). All other blocked Tier 1 + tests (`property-field-equality`, `nested-property-access`, `properties-triggers-editions-join`, + `logical-not-input`, `left-entity-filter`, `property-mask`) are now covered by snapshot tests. 
diff --git a/libs/@local/hashql/eval/src/context.rs b/libs/@local/hashql/eval/src/context.rs new file mode 100644 index 00000000000..6098a7d448d --- /dev/null +++ b/libs/@local/hashql/eval/src/context.rs @@ -0,0 +1,119 @@ +use core::{alloc::Allocator, ops::Index}; + +use hashql_core::{ + heap::BumpAllocator, id::bit_vec::DenseBitSet, r#type::environment::Environment, +}; +use hashql_diagnostics::DiagnosticIssues; +use hashql_mir::{ + body::{ + Body, Source, + basic_block::{BasicBlockId, BasicBlockSlice}, + local::Local, + }, + def::{DefId, DefIdSlice, DefIdVec}, + pass::{ + analysis::dataflow::{ + TraversalLivenessAnalysis, + framework::{DataflowAnalysis as _, DataflowResults}, + }, + execution::{ExecutionAnalysisResidual, VertexType, traversal::TraversalPathBitSet}, + }, +}; + +use crate::error::EvalDiagnosticIssues; + +struct BasicBlockLiveOut( + Box, TraversalPathBitSet)>, A>, +); + +impl Index for BasicBlockLiveOut { + type Output = DenseBitSet; + + #[inline] + fn index(&self, index: BasicBlockId) -> &Self::Output { + &self.0[index].0 + } +} + +pub struct LiveOut(DefIdVec>, A>); + +impl Index<(DefId, BasicBlockId)> for LiveOut { + type Output = DenseBitSet; + + #[inline] + fn index(&self, (body, index): (DefId, BasicBlockId)) -> &Self::Output { + &self.0[body] + .as_ref() + .expect("body should have completed live out analysis")[index] + } +} + +pub struct EvalContext<'ctx, 'heap, A: Allocator> { + pub env: &'ctx Environment<'heap>, + + pub bodies: &'ctx DefIdSlice>, + pub execution: &'ctx DefIdSlice>>, + + pub live_out: LiveOut, + pub diagnostics: EvalDiagnosticIssues, + pub alloc: A, +} + +impl<'ctx, 'heap, A: Allocator> EvalContext<'ctx, 'heap, A> { + pub fn new_in( + env: &'ctx Environment<'heap>, + bodies: &'ctx DefIdSlice>, + execution: &'ctx DefIdSlice>>, + alloc: A, + mut scratch: S, + ) -> Self + where + A: Clone, + { + let mut live_out = DefIdVec::new_in(alloc.clone()); + + for body in bodies { + match body.source { + Source::Ctor(_) + | 
Source::Closure(_, _) + | Source::Thunk(_, _) + | Source::Intrinsic(_) => continue, + Source::GraphReadFilter(_) => {} + } + + let Some(vertex) = VertexType::from_local(env, &body.local_decls[Local::VERTEX]) else { + unreachable!("graph related operations always have at least two args") + }; + + #[expect(unsafe_code)] + let exit_states = scratch.scoped(|scoped| { + let analysis = TraversalLivenessAnalysis { vertex }; + let DataflowResults { + analysis: _, + entry_states: _, + exit_states, + } = analysis.iterate_to_fixpoint_in(body, &scoped); + + let mut exit_states_boxed = + Box::new_uninit_slice_in(exit_states.len(), alloc.clone()); + let (_, rest) = exit_states_boxed.write_iter(exit_states); + debug_assert!(rest.is_empty()); + + // SAFETY: exit_states.len() == exit_states_boxed.len() by construction + let exit_states = unsafe { exit_states_boxed.assume_init() }; + BasicBlockLiveOut(BasicBlockSlice::from_boxed_slice(exit_states)) + }); + + live_out.insert(body.id, exit_states); + } + + Self { + env, + bodies, + execution, + live_out: LiveOut(live_out), + diagnostics: DiagnosticIssues::new(), + alloc, + } + } +} diff --git a/libs/@local/hashql/eval/src/postgres/continuation.rs b/libs/@local/hashql/eval/src/postgres/continuation.rs new file mode 100644 index 00000000000..b722e1e0ab9 --- /dev/null +++ b/libs/@local/hashql/eval/src/postgres/continuation.rs @@ -0,0 +1,145 @@ +//! Naming conventions and helpers for the continuation LATERAL subqueries. +//! +//! Each postgres island in a filter body produces a `CROSS JOIN LATERAL` subquery +//! that evaluates its CASE tree once per row (via `OFFSET 0`) and returns a +//! composite `continuation` value. This module provides the identifiers, column +//! names, and expression helpers used to construct and reference those subqueries. 
+
+use hash_graph_postgres_store::store::postgres::query::{
+    self, ColumnName, Expression, Identifier, TableName, TableReference,
+};
+use hashql_mir::{def::DefId, pass::execution::IslandId};
+
+/// Identifies a specific continuation LATERAL subquery by its body and island.
+///
+/// Converted to a [`TableReference`] via [`Self::table_ref`] for use as the
+/// LATERAL alias (e.g. `... AS "continuation_0_1"`).
+#[derive(Debug, Copy, Clone, PartialEq, Eq)]
+pub(crate) struct ContinuationAlias {
+    pub body: DefId,
+    pub island: IslandId,
+}
+
+impl ContinuationAlias {
+    pub(crate) fn table_ref(self) -> TableReference<'static> {
+        TableReference {
+            schema: None,
+            name: TableName::from(self.identifier()),
+            alias: None,
+        }
+    }
+
+    /// Base identifier for this continuation: `continuation_{body}_{island}`.
+    pub(crate) fn identifier(self) -> Identifier<'static> {
+        Identifier::from(format!("continuation_{}_{}", self.body, self.island))
+    }
+
+    /// Column alias for a decomposed field: `continuation_{body}_{island}_{field}`.
+    pub(crate) fn field_identifier(self, field: ContinuationColumn) -> Identifier<'static> {
+        Identifier::from(format!(
+            "continuation_{}_{}_{}",
+            self.body,
+            self.island,
+            field.as_str(),
+        ))
+    }
+}
+
+/// All column names used within the continuation LATERAL subquery and the
+/// `continuation` composite type.
+///
+/// [`Self::Entry`] is the alias for the composite value in the LATERAL's SELECT
+/// list. The remaining variants are fields of the composite type itself.
+#[derive(Debug, Copy, Clone, PartialEq, Eq)]
+pub(crate) enum ContinuationColumn {
+    /// The composite `continuation` value column in the LATERAL subquery.
+    ///
+    /// The LATERAL is `(SELECT ...::continuation AS "row" OFFSET 0) AS "continuation_X_Y"`,
+    /// so field access is `("continuation_X_Y"."row")."filter"`.
+    Entry,
+    /// The filter boolean. `NULL` means passthrough, `true` keeps, `false` rejects.
+    Filter,
+    /// The target basic block id for island exits.
+    Block,
+    /// Array of local ids being transferred on island exit.
+    Locals,
+    /// Array of jsonb values corresponding to [`Self::Locals`].
+    Values,
+}
+
+impl ContinuationColumn {
+    pub(crate) const fn as_str(self) -> &'static str {
+        match self {
+            Self::Entry => "row",
+            Self::Filter => "filter",
+            Self::Block => "block",
+            Self::Locals => "locals",
+            Self::Values => "values",
+        }
+    }
+
+    pub(crate) fn identifier(self) -> Identifier<'static> {
+        Identifier::from(self.as_str())
+    }
+
+    pub(crate) fn column_name(self) -> ColumnName<'static> {
+        ColumnName::from(self.identifier())
+    }
+}
+
+/// Builds a `FieldAccess` expression that accesses a field of the continuation
+/// composite through the LATERAL alias.
+///
+/// Produces: `("continuation_X_Y"."row")."field"`.
+pub(crate) fn field_access(
+    alias: &TableReference<'static>,
+    field: ContinuationColumn,
+) -> Expression {
+    Expression::FieldAccess {
+        expr: Box::new(Expression::ColumnReference(query::ColumnReference {
+            correlation: Some(alias.clone()),
+            name: ContinuationColumn::Entry.column_name(),
+        })),
+        field: field.column_name(),
+    }
+}
+
+/// Builds the WHERE condition for a continuation:
+/// `("continuation_X_Y"."row")."filter" IS NOT FALSE`.
+///
+/// This passes rows where filter is `TRUE` (keep) or `NULL` (no opinion),
+/// and rejects only `FALSE`.
+pub(crate) fn filter_condition(alias: &TableReference<'static>) -> Expression { + Expression::Unary(query::UnaryExpression { + op: query::UnaryOperator::IsNotFalse, + expr: Box::new(field_access(alias, ContinuationColumn::Filter)), + }) +} + +#[cfg(test)] +mod tests { + use hashql_mir::{def::DefId, pass::execution::IslandId}; + + use super::{ContinuationAlias, ContinuationColumn}; + + fn alias(body: u32, island: u32) -> ContinuationAlias { + ContinuationAlias { + body: DefId::new(body), + island: IslandId::new(island), + } + } + + #[test] + fn alias_naming() { + let alias = alias(0, 1); + assert_eq!(alias.identifier().as_ref(), "continuation_0_1"); + } + + #[test] + fn field_identifier_naming() { + let alias = alias(2, 3); + assert_eq!( + alias.field_identifier(ContinuationColumn::Block).as_ref(), + "continuation_2_3_block" + ); + } +} diff --git a/libs/@local/hashql/eval/src/postgres/error.rs b/libs/@local/hashql/eval/src/postgres/error.rs new file mode 100644 index 00000000000..cdce1994c81 --- /dev/null +++ b/libs/@local/hashql/eval/src/postgres/error.rs @@ -0,0 +1,310 @@ +//! Diagnostic categories and constructors for the PostgreSQL compilation backend. +//! +//! All diagnostics in this module represent compiler-internal invariant violations: MIR +//! constructs that should have been rejected by the execution placement pass before reaching +//! SQL lowering. 
+ +use alloc::borrow::Cow; + +use hashql_core::span::SpanId; +use hashql_diagnostics::{ + Diagnostic, Label, + category::{DiagnosticCategory, TerminalDiagnosticCategory}, + diagnostic::Message, + severity::Severity, +}; + +use crate::error::{EvalDiagnostic, EvalDiagnosticCategory}; + +const UNSUPPORTED_VERTEX_TYPE: TerminalDiagnosticCategory = TerminalDiagnosticCategory { + id: "unsupported-vertex-type", + name: "Unsupported Vertex Type", +}; + +const ENTITY_PATH_RESOLUTION: TerminalDiagnosticCategory = TerminalDiagnosticCategory { + id: "entity-path-resolution", + name: "Cannot Resolve Entity Property Path", +}; + +const INVALID_ENV_ACCESS: TerminalDiagnosticCategory = TerminalDiagnosticCategory { + id: "invalid-env-access", + name: "Invalid Captured Variable Access", +}; + +const INVALID_ENV_PROJECTION: TerminalDiagnosticCategory = TerminalDiagnosticCategory { + id: "invalid-env-projection", + name: "Invalid Captured Variable Projection", +}; + +const CLOSURE_APPLICATION: TerminalDiagnosticCategory = TerminalDiagnosticCategory { + id: "closure-application", + name: "Closure Calls Not Supported in SQL", +}; + +const CLOSURE_AGGREGATE: TerminalDiagnosticCategory = TerminalDiagnosticCategory { + id: "closure-aggregate", + name: "Closure Construction Not Supported in SQL", +}; + +const FUNCTION_POINTER_CONSTANT: TerminalDiagnosticCategory = TerminalDiagnosticCategory { + id: "function-pointer-constant", + name: "Function Pointers Not Supported in SQL", +}; + +const PROJECTED_ASSIGNMENT: TerminalDiagnosticCategory = TerminalDiagnosticCategory { + id: "projected-assignment", + name: "Projected Assignment in SSA", +}; + +const GRAPH_READ_TERMINATOR: TerminalDiagnosticCategory = TerminalDiagnosticCategory { + id: "graph-read-terminator", + name: "Nested Graph Reads Not Supported in SQL", +}; + +const MISSING_ISLAND_GRAPH: TerminalDiagnosticCategory = TerminalDiagnosticCategory { + id: "missing-island-graph", + name: "Missing Island Graph for Body", +}; + +/// Diagnostic 
categories for bugs and unsupported constructs encountered during SQL compilation. +/// +/// These categories cover internal compiler invariants (e.g. "placement should have rejected +/// this") and mismatches between MIR expectations and SQL lowering capabilities (e.g. entity-path +/// resolution failures). +#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +pub enum PostgresDiagnosticCategory { + /// A non-entity vertex type reached the SQL backend. + UnsupportedVertexType, + /// An [`EntityPath`] could not be mapped to a PostgreSQL column or expression. + /// + /// [`EntityPath`]: hashql_mir::pass::execution::traversal::EntityPath + EntityPathResolution, + /// The captured environment was referenced as a value instead of being field-projected. + InvalidEnvAccess, + /// A non-field projection was applied to the captured environment. + InvalidEnvProjection, + /// A closure call reached the SQL backend. + ClosureApplication, + /// A closure value construction reached the SQL backend. + ClosureAggregate, + /// A function pointer constant reached the SQL backend. + FunctionPointerConstant, + /// MIR contained an assignment to a projected place (invalid in SSA form). + ProjectedAssignment, + /// A nested graph read terminator reached the SQL backend. + GraphReadTerminator, + /// Island analysis did not produce an island graph for a filter body. 
+ MissingIslandGraph, +} + +impl DiagnosticCategory for PostgresDiagnosticCategory { + fn id(&self) -> Cow<'_, str> { + Cow::Borrowed("postgres") + } + + fn name(&self) -> Cow<'_, str> { + Cow::Borrowed("Postgres") + } + + fn subcategory(&self) -> Option<&dyn DiagnosticCategory> { + match self { + Self::UnsupportedVertexType => Some(&UNSUPPORTED_VERTEX_TYPE), + Self::EntityPathResolution => Some(&ENTITY_PATH_RESOLUTION), + Self::InvalidEnvAccess => Some(&INVALID_ENV_ACCESS), + Self::InvalidEnvProjection => Some(&INVALID_ENV_PROJECTION), + Self::ClosureApplication => Some(&CLOSURE_APPLICATION), + Self::ClosureAggregate => Some(&CLOSURE_AGGREGATE), + Self::FunctionPointerConstant => Some(&FUNCTION_POINTER_CONSTANT), + Self::ProjectedAssignment => Some(&PROJECTED_ASSIGNMENT), + Self::GraphReadTerminator => Some(&GRAPH_READ_TERMINATOR), + Self::MissingIslandGraph => Some(&MISSING_ISLAND_GRAPH), + } + } +} + +const fn category(category: PostgresDiagnosticCategory) -> EvalDiagnosticCategory { + EvalDiagnosticCategory::Postgres(category) +} + +#[coverage(off)] +pub(super) fn unsupported_vertex_type(span: SpanId) -> EvalDiagnostic { + let mut diagnostic = Diagnostic::new( + category(PostgresDiagnosticCategory::UnsupportedVertexType), + Severity::Bug, + ) + .primary(Label::new( + span, + "query operates on a vertex type that cannot be compiled to SQL", + )); + + diagnostic.add_message(Message::note( + "the HASH type system supports entities, entity types, property types, and data types, \ + but only entity queries can currently be compiled to SQL; the type system should not \ + have enabled SQL compilation for this vertex type", + )); + + diagnostic +} + +#[coverage(off)] +pub(super) fn entity_path_resolution(span: SpanId) -> EvalDiagnostic { + let mut diagnostic = Diagnostic::new( + category(PostgresDiagnosticCategory::EntityPathResolution), + Severity::Bug, + ) + .primary(Label::new( + span, + "cannot map this property access to a SQL column", + )); + + 
diagnostic.add_message(Message::note( + "indicates a mismatch between the entity type declaration and the SQL column mapping; \ + this can happen when they get out of sync", + )); + + diagnostic +} + +#[coverage(off)] +pub(super) fn invalid_env_access(span: SpanId) -> EvalDiagnostic { + let mut diagnostic = Diagnostic::new( + category(PostgresDiagnosticCategory::InvalidEnvAccess), + Severity::Bug, + ) + .primary(Label::new( + span, + "direct access to the captured environment without field destructuring", + )); + + diagnostic.add_message(Message::note( + "the environment is captured implicitly as the first argument and immediately \ + destructured; a direct reference is never generated by the compiler", + )); + + diagnostic +} + +#[coverage(off)] +pub(super) fn invalid_env_projection(span: SpanId) -> EvalDiagnostic { + let mut diagnostic = Diagnostic::new( + category(PostgresDiagnosticCategory::InvalidEnvProjection), + Severity::Bug, + ) + .primary(Label::new( + span, + "non-field projection on the captured environment", + )); + + diagnostic.add_message(Message::note( + "the environment is captured implicitly as the first argument and only accessed via field \ + projections; other projection kinds are never generated by the compiler", + )); + + diagnostic +} + +#[coverage(off)] +pub(super) fn closure_application(span: SpanId) -> EvalDiagnostic { + let mut diagnostic = Diagnostic::new( + category(PostgresDiagnosticCategory::ClosureApplication), + Severity::Bug, + ) + .primary(Label::new(span, "closure call cannot be compiled to SQL")); + + diagnostic.add_message(Message::note( + "the statement placement pass should have rejected this from the Postgres backend", + )); + + diagnostic +} + +#[coverage(off)] +pub(super) fn closure_aggregate(span: SpanId) -> EvalDiagnostic { + let mut diagnostic = Diagnostic::new( + category(PostgresDiagnosticCategory::ClosureAggregate), + Severity::Bug, + ) + .primary(Label::new( + span, + "closure construction cannot be compiled to 
SQL", + )); + + diagnostic.add_message(Message::note( + "the statement placement pass should have rejected this from the Postgres backend", + )); + + diagnostic +} + +#[coverage(off)] +pub(super) fn function_pointer_constant(span: SpanId) -> EvalDiagnostic { + let mut diagnostic = Diagnostic::new( + category(PostgresDiagnosticCategory::FunctionPointerConstant), + Severity::Bug, + ) + .primary(Label::new( + span, + "function pointer cannot be compiled to SQL", + )); + + diagnostic.add_message(Message::note( + "the statement placement pass should have rejected this from the Postgres backend", + )); + + diagnostic +} + +#[coverage(off)] +pub(super) fn projected_assignment(span: SpanId) -> EvalDiagnostic { + let mut diagnostic = Diagnostic::new( + category(PostgresDiagnosticCategory::ProjectedAssignment), + Severity::Bug, + ) + .primary(Label::new( + span, + "assignment to a projected place in SSA form", + )); + + diagnostic.add_message(Message::note( + "MIR is always in SSA form; projected assignments should never exist", + )); + + diagnostic +} + +#[coverage(off)] +pub(super) fn graph_read_terminator(span: SpanId) -> EvalDiagnostic { + let mut diagnostic = Diagnostic::new( + category(PostgresDiagnosticCategory::GraphReadTerminator), + Severity::Bug, + ) + .primary(Label::new( + span, + "nested graph read cannot appear in SQL-compiled code", + )); + + diagnostic.add_message(Message::note( + "the statement placement pass should have rejected this from the Postgres backend", + )); + + diagnostic +} + +#[coverage(off)] +pub(super) fn missing_island_graph(span: SpanId) -> EvalDiagnostic { + let mut diagnostic = Diagnostic::new( + category(PostgresDiagnosticCategory::MissingIslandGraph), + Severity::Bug, + ) + .primary(Label::new( + span, + "no island graph found for this filter body", + )); + + diagnostic.add_message(Message::note( + "the island analysis pass should have produced an island graph for every graph read \ + filter body; its absence indicates a compiler bug", 
+ )); + + diagnostic +} diff --git a/libs/@local/hashql/eval/src/postgres/filter/mod.rs b/libs/@local/hashql/eval/src/postgres/filter/mod.rs new file mode 100644 index 00000000000..2486472cdf6 --- /dev/null +++ b/libs/@local/hashql/eval/src/postgres/filter/mod.rs @@ -0,0 +1,788 @@ +//! CFG-to-SQL lowering for graph read filter bodies. +//! +//! A [`GraphReadFilterCompiler`] compiles the Postgres-placed portion of a filter body (an +//! [`IslandNode`]) into a single SQL [`Expression`]. The expression is a `CASE` tree that +//! evaluates MIR control flow and produces a `continuation` composite value. +//! +//! ## Strategy +//! +//! - Statements are compiled in execution order, tracking each MIR local as a SQL expression. +//! - Control flow is compiled using an explicit stack of [`Frame`]s rather than recursion. +//! - [`SwitchInt`] becomes `CASE WHEN ... THEN ... ELSE ... END` via [`finish_switch_int`]. +//! - When control leaves the current Postgres island, compilation emits an "island exit" +//! continuation that transfers live-out locals back to the interpreter. +//! +//! Unsupported constructs (closures, nested graph reads, etc.) emit diagnostics and lower to +//! `NULL` placeholders so compilation can continue and report multiple issues. +//! +//! 
[`SwitchInt`]: hashql_mir::body::terminator::SwitchInt + +#[cfg(test)] +mod tests; + +use alloc::alloc::Global; +use core::alloc::Allocator; + +use hash_graph_postgres_store::store::postgres::query::{ + self, BinaryExpression, BinaryOperator, Expression, PostgresType, UnaryExpression, + UnaryOperator, +}; +use hashql_core::{ + graph::Predecessors as _, + id::{ + Id as _, IdSnapshotVec, + snapshot_vec::{AppendOnly, Snapshot}, + }, + span::SpanId, +}; +use hashql_diagnostics::DiagnosticIssues; +use hashql_hir::node::operation::{InputOp, UnOp}; +use hashql_mir::{ + body::{ + Body, + basic_block::{BasicBlock, BasicBlockId}, + constant::Constant, + local::{Local, LocalSnapshotVec}, + operand::Operand, + place::{FieldIndex, Place, Projection, ProjectionKind}, + rvalue::{Aggregate, AggregateKind, Apply, BinOp, Binary, Input, RValue, Unary}, + statement::{Assign, Statement, StatementKind}, + terminator::{Goto, Return, SwitchInt, SwitchTargets, Target, TerminatorKind}, + }, + pass::execution::{IslandNode, TargetId, VertexType, traversal::EntityPath}, +}; + +use super::{ + DatabaseContext, + error::{ + closure_aggregate, closure_application, entity_path_resolution, function_pointer_constant, + graph_read_terminator, invalid_env_access, invalid_env_projection, projected_assignment, + unsupported_vertex_type, + }, + traverse::eval_entity_path, +}; +use crate::{context::EvalContext, error::EvalDiagnosticIssues}; + +/// Internal representation of a continuation result before casting to the SQL composite type. +/// +/// This mirrors the runtime continuation contract: +/// - [`Return`]: produces a filter decision for the current row. +/// - [`IslandExit`]: transfers control back to the interpreter with a next-block id and live +/// locals. +/// - [`Null`]: produces an all-`NULL` continuation used after unrecoverable lowering errors. 
+/// +/// [`Return`]: Self::Return +/// [`IslandExit`]: Self::IslandExit +/// [`Null`]: Self::Null +enum Continuation { + /// The filter body returned a boolean filter decision. + Return { filter: Expression }, + /// Control flow left the current island, transferring live locals to the interpreter. + IslandExit { + block: BasicBlockId, + locals: Vec, + values: Vec, + }, + /// Error sentinel; all fields are `NULL`. + Null, +} + +impl From for Expression { + fn from(continuation: Continuation) -> Self { + let null = Self::Constant(query::Constant::Null); + + // Row fields must match the `continuation` composite type: + // (filter, block, locals, values) + let row = match continuation { + Continuation::Return { filter } => { + vec![filter, null.clone(), null.clone(), null] + } + Continuation::IslandExit { + block, + locals, + values, + } => { + vec![ + null, + Self::Constant(query::Constant::U32(block.as_u32())), + Self::Function(query::Function::ArrayLiteral { + elements: locals, + element_type: PostgresType::Int, + }), + Self::Function(query::Function::ArrayLiteral { + elements: values, + element_type: PostgresType::JsonB, + }), + ] + } + Continuation::Null => { + vec![null.clone(), null.clone(), null.clone(), null] + } + }; + + Self::Row(row).cast(PostgresType::Continuation) + } +} + +/// Stack frame for the iterative CFG-to-SQL compiler. +/// +/// The compiler walks basic blocks using an explicit stack instead of recursion. Frames represent +/// pending work items: +/// +/// - [`Compile`]: compile the block at the given id. +/// - [`Enter`]: snapshot locals, assign block parameters, then compile the target block. +/// - [`Rollback`]: restore the local-expression map after a branch has been compiled. +/// - [`FinishSwitchInt`]: assemble a `CASE` expression from the already-compiled branch results. 
+/// +/// [`Compile`]: Self::Compile +/// [`Enter`]: Self::Enter +/// [`Rollback`]: Self::Rollback +/// [`FinishSwitchInt`]: Self::FinishSwitchInt +enum Frame<'ctx, 'heap> { + Compile(BasicBlockId), + Enter { + from: BasicBlockId, + to: Target<'heap>, + }, + Rollback(Snapshot), + FinishSwitchInt { + discriminant: Box, + targets: &'ctx SwitchTargets<'heap>, + }, +} + +/// Assembles a `CASE` expression from the completed branches of a `SwitchInt`. +/// +/// Branch results are expected on `results` in the same order as `targets.values()`, with an +/// optional "otherwise" result on top. This function drains those results and pushes a single +/// [`Expression::CaseWhen`] back onto `results`. +fn finish_switch_int( + results: &mut Vec, + discriminant: Expression, + targets: &SwitchTargets<'_>, +) { + let else_result = targets + .has_otherwise() + .then(|| Box::new(results.pop().unwrap_or_else(|| unreachable!()))); + + // Branch results were pushed in forward order (first target first), so + // draining the tail gives them in the same order as `targets.values()`. + let start = results.len() - targets.values().len(); + let branch_results = results.drain(start..); + + debug_assert_eq!(branch_results.len(), targets.values().len()); + + // SwitchInt compares the discriminant against integer values. If the + // discriminant is a boolean expression (e.g. `IS NOT NULL`), PostgreSQL + // rejects `boolean = integer`. Casting to `::int` is safe for all types + // and a no-op when the discriminant is already integral. 
+ let discriminant = Box::new(discriminant.grouped().cast(PostgresType::Int)); + + let mut discriminant = Some(discriminant); + let mut conditions = Vec::with_capacity(targets.values().len()); + + for (index, (&value, then)) in targets.values().iter().zip(branch_results).enumerate() { + let is_last = index == targets.values().len() - 1; + let discriminant = if is_last { + discriminant.take().unwrap_or_else(|| unreachable!()) + } else { + discriminant.clone().unwrap_or_else(|| unreachable!()) + }; + + let when = Expression::Binary(BinaryExpression { + op: BinaryOperator::Equal, + left: discriminant, + right: Box::new(Expression::Constant(query::Constant::U128(value))), + }); + + conditions.push((when, then)); + } + + results.push(Expression::CaseWhen { + conditions, + else_result, + }); +} + +/// Compiles a Postgres island of a graph-read filter body into a `continuation` expression. +/// +/// The compiler maintains a mapping from MIR locals to SQL expressions, supports +/// snapshot/rollback across branching control flow, and accumulates diagnostics into an +/// internal buffer retrievable via [`Self::into_diagnostics`]. +pub(crate) struct GraphReadFilterCompiler<'ctx, 'heap, A: Allocator = Global, S: Allocator = Global> +{ + context: &'ctx EvalContext<'ctx, 'heap, A>, + + body: &'ctx Body<'heap>, + /// MIR local → SQL expression mapping, with snapshot/rollback for branching. 
+ locals: LocalSnapshotVec, AppendOnly, S>, + diagnostics: EvalDiagnosticIssues, + + scratch: S, +} + +impl<'ctx, 'heap, A: Allocator, S: Allocator> GraphReadFilterCompiler<'ctx, 'heap, A, S> { + pub(crate) fn new( + context: &'ctx EvalContext<'ctx, 'heap, A>, + body: &'ctx Body<'heap>, + scratch: S, + ) -> Self + where + S: Clone, + { + Self { + context, + body, + locals: IdSnapshotVec::new_in(scratch.clone()), + diagnostics: DiagnosticIssues::new(), + scratch, + } + } + + pub(crate) fn into_diagnostics(self) -> EvalDiagnosticIssues { + self.diagnostics + } + + fn compile_place_vertex<'place>( + &mut self, + db: &mut DatabaseContext<'heap, A>, + span: SpanId, + place: &'place Place<'heap>, + ) -> (Expression, &'place [Projection<'heap>]) { + let Some(r#type) = + VertexType::from_local(self.context.env, &self.body.local_decls[Local::VERTEX]) + else { + self.diagnostics.push(unsupported_vertex_type(span)); + return (Expression::Constant(query::Constant::Null), &[]); + }; + + match r#type { + VertexType::Entity => { + let Some((path, consumed)) = EntityPath::resolve(&place.projections) else { + self.diagnostics.push(entity_path_resolution(span)); + return (Expression::Constant(query::Constant::Null), &[]); + }; + + let base = eval_entity_path(db, path); + + (base, &place.projections[consumed..]) + } + } + } + + /// Only field projections are supported on the environment; other projection kinds emit + /// diagnostics. + fn compile_place_env<'place>( + &mut self, + db: &mut DatabaseContext<'heap, A>, + span: SpanId, + place: &'place Place<'heap>, + ) -> (Expression, &'place [Projection<'heap>]) { + match &*place.projections { + [] => { + self.diagnostics.push(invalid_env_access(span)); + (Expression::Constant(query::Constant::Null), &[]) + } + [ + Projection { + r#type: _, + kind: ProjectionKind::Field(field), + }, + rest @ .., + ] => { + let param = db.parameters.env(*field); + (param.into(), rest) + } + [..] 
=> { + self.diagnostics.push(invalid_env_projection(span)); + (Expression::Constant(query::Constant::Null), &[]) + } + } + } + + fn compile_place( + &mut self, + db: &mut DatabaseContext<'heap, A>, + span: SpanId, + place: &Place<'heap>, + ) -> Expression { + let (mut expression, projections) = match place.local { + Local::ENV => self.compile_place_env(db, span, place), + Local::VERTEX => self.compile_place_vertex(db, span, place), + _ => ( + self.locals + .lookup(place.local) + .cloned() + .unwrap_or_else(|| unreachable!("use before def")), + &*place.projections, + ), + }; + + if !projections.is_empty() { + let mut arguments = Vec::with_capacity(projections.len() + 1); + arguments.push(expression); + + for projection in projections { + let index = match &projection.kind { + // TODO: in the future if we desugar struct FieldByName to FieldByIndex we need + // to convert back here + ProjectionKind::Field(field_index) => { + Expression::Constant(query::Constant::U32(field_index.as_u32())) + } + &ProjectionKind::FieldByName(symbol) => db.parameters.symbol(symbol).into(), + &ProjectionKind::Index(local) => self + .locals + .lookup(local) + .unwrap_or_else(|| unreachable!("use before def")) + .clone(), + }; + + // `json_extract_path` takes text arguments, so all indices (including + // numeric field indices) must be cast to text. 
+ arguments.push(index.grouped().cast(PostgresType::Text)); + } + + expression = Expression::Function(query::Function::JsonExtractPath(arguments)); + } + + expression + } + + fn compile_constant( + &mut self, + db: &mut DatabaseContext<'heap, A>, + span: SpanId, + constant: &Constant<'heap>, + ) -> Expression { + match constant { + Constant::Int(int) if let Some(uint) = int.as_u32() => { + Expression::Constant(query::Constant::U32(uint)) + } + &Constant::Int(int) => db.parameters.int(int).into(), + &Constant::Primitive(primitive) => db.parameters.primitive(primitive).into(), + // Unit is the zero-sized type, represented as JSON `null` inside jsonb values. + Constant::Unit => Expression::Constant(query::Constant::JsonNull), + Constant::FnPtr(_) => { + self.diagnostics.push(function_pointer_constant(span)); + Expression::Constant(query::Constant::Null) + } + } + } + + fn compile_operand( + &mut self, + db: &mut DatabaseContext<'heap, A>, + span: SpanId, + operand: &Operand<'heap>, + ) -> Expression { + match operand { + Operand::Place(place) => self.compile_place(db, span, place), + Operand::Constant(constant) => self.compile_constant(db, span, constant), + } + } + + fn compile_unary( + &mut self, + db: &mut DatabaseContext<'heap, A>, + span: SpanId, + Unary { op, operand }: &Unary<'heap>, + ) -> Expression { + let operand = self.compile_operand(db, span, operand); + + let op = match *op { + UnOp::Not => UnaryOperator::Not, + UnOp::BitNot => UnaryOperator::BitwiseNot, + UnOp::Neg => UnaryOperator::Negate, + }; + + Expression::Unary(UnaryExpression { + op, + expr: Box::new(operand), + }) + } + + fn compile_binary( + &mut self, + db: &mut DatabaseContext<'heap, A>, + span: SpanId, + Binary { op, left, right }: &Binary<'heap>, + ) -> Expression { + let mut left = self.compile_operand(db, span, left); + let mut right = self.compile_operand(db, span, right); + + // Operands coming from jsonb extraction are untyped from Postgres' perspective. 
+ // Arithmetic and bitwise operators need explicit casts; comparisons work on jsonb + // directly. + let (op, cast) = match *op { + BinOp::Add => (BinaryOperator::Add, Some(PostgresType::Numeric)), + BinOp::Sub => (BinaryOperator::Subtract, Some(PostgresType::Numeric)), + BinOp::BitAnd => (BinaryOperator::BitwiseAnd, Some(PostgresType::BigInt)), + BinOp::BitOr => (BinaryOperator::BitwiseOr, Some(PostgresType::BigInt)), + BinOp::Eq => (BinaryOperator::Equal, None), + BinOp::Ne => (BinaryOperator::NotEqual, None), + BinOp::Lt => (BinaryOperator::Less, None), + BinOp::Lte => (BinaryOperator::LessOrEqual, None), + BinOp::Gt => (BinaryOperator::Greater, None), + BinOp::Gte => (BinaryOperator::GreaterOrEqual, None), + }; + + if let Some(target) = cast { + left = left.grouped().cast(target.clone()); + right = right.grouped().cast(target); + } + + Expression::Binary(BinaryExpression { + op, + left: Box::new(left), + right: Box::new(right), + }) + } + + fn compile_input( + db: &mut DatabaseContext<'heap, A>, + Input { op, name }: &Input<'heap>, + ) -> Expression { + let index = db.parameters.input(*name); + + match *op { + InputOp::Load { required: _ } => index.into(), + InputOp::Exists => Expression::Unary(UnaryExpression { + op: UnaryOperator::Not, + expr: Box::new(Expression::Unary(UnaryExpression { + op: UnaryOperator::IsNull, + expr: Box::new(index.into()), + })), + }), + } + } + + fn compile_aggregate( + &mut self, + db: &mut DatabaseContext<'heap, A>, + span: SpanId, + Aggregate { kind, operands }: &Aggregate<'heap>, + ) -> Expression { + match kind { + AggregateKind::Tuple => { + let mut expressions = Vec::with_capacity(operands.len()); + + for operand in operands { + expressions.push(self.compile_operand(db, span, operand)); + } + + // Values are reconstructed to their corresponding tuple and struct definitions + // using type-directed deserialization. 
+ Expression::Function(query::Function::JsonBuildArray(expressions)) + } + AggregateKind::Struct { fields } => { + debug_assert_eq!(fields.len(), operands.len()); + + let mut expressions = Vec::with_capacity(fields.len()); + + for (&key, value) in fields.iter().zip(operands) { + let key = db.parameters.symbol(key); + let value = self.compile_operand(db, span, value); + + expressions.push((key.into(), value)); + } + + // Values are reconstructed to their corresponding tuple and struct definitions + // using type-directed deserialization. + Expression::Function(query::Function::JsonBuildObject(expressions)) + } + AggregateKind::List => { + let mut expressions = Vec::with_capacity(operands.len()); + + for operand in operands { + expressions.push(self.compile_operand(db, span, operand)); + } + + Expression::Function(query::Function::JsonBuildArray(expressions)) + } + #[expect(clippy::integer_division_remainder_used, clippy::integer_division)] + AggregateKind::Dict => { + debug_assert_eq!(operands.len() % 2, 0); + + let mut expressions = Vec::with_capacity(operands.len() / 2); + + for [key, value] in operands.iter().array_chunks() { + expressions.push(( + self.compile_operand(db, span, key), + self.compile_operand(db, span, value), + )); + } + + Expression::Function(query::Function::JsonBuildObject(expressions)) + } + AggregateKind::Opaque(_) => { + debug_assert_eq!(operands.len(), 1); + + self.compile_operand(db, span, &operands[FieldIndex::OPAQUE_VALUE]) + } + AggregateKind::Closure => { + self.diagnostics.push(closure_aggregate(span)); + Expression::Constant(query::Constant::Null) + } + } + } + + fn compile_apply(&mut self, span: SpanId, _: &Apply<'heap>) -> Expression { + self.diagnostics.push(closure_application(span)); + Expression::Constant(query::Constant::Null) + } + + fn compile_rvalue( + &mut self, + db: &mut DatabaseContext<'heap, A>, + span: SpanId, + rvalue: &RValue<'heap>, + ) -> Expression { + match rvalue { + RValue::Load(operand) => 
self.compile_operand(db, span, operand), + RValue::Binary(binary) => self.compile_binary(db, span, binary), + RValue::Unary(unary) => self.compile_unary(db, span, unary), + RValue::Aggregate(aggregate) => self.compile_aggregate(db, span, aggregate), + RValue::Input(input) => Self::compile_input(db, input), + RValue::Apply(apply) => self.compile_apply(span, apply), + } + } + + fn compile_statement( + &mut self, + db: &mut DatabaseContext<'heap, A>, + statement: &Statement<'heap>, + ) { + let Assign { lhs, rhs } = match &statement.kind { + StatementKind::Assign(assign) => assign, + StatementKind::Nop | StatementKind::StorageLive(_) | StatementKind::StorageDead(_) => { + return; + } + }; + + if !lhs.projections.is_empty() { + self.diagnostics.push(projected_assignment(statement.span)); + return; + } + + let rvalue = self.compile_rvalue(db, statement.span, rhs); + self.locals.insert(lhs.local, rvalue); + } + + fn assign_params( + &mut self, + db: &mut DatabaseContext<'heap, A>, + span: SpanId, + target: &Target<'heap>, + ) -> &'ctx BasicBlock<'heap> { + let target_block = &self.body.basic_blocks[target.block]; + debug_assert_eq!(target_block.params.len(), target.args.len()); + + for (¶m, arg) in target_block.params.iter().zip(target.args.iter()) { + let expression = self.compile_operand(db, span, arg); + self.locals.insert(param, expression); + } + + target_block + } + + fn find_entry_block(&self, island: &IslandNode) -> BasicBlockId { + for block in island.members() { + if self + .body + .basic_blocks + .predecessors(block) + .all(|pred| !island.contains(pred)) + { + return block; + } + } + + unreachable!("The postgres island always has an entry block (BasicBlockId::START)") + } + + fn compile_island_exit( + &mut self, + db: &mut DatabaseContext<'heap, A>, + span: SpanId, + current: BasicBlockId, + target: &Target<'heap>, + ) -> Expression { + // Re-acquire live-out information because `compile_operand` borrows `self` mutably. 
+ let live_out = &self.context.live_out[(self.body.id, current)]; + + let mut locals = Vec::with_capacity(target.args.len() + live_out.count()); + let mut values = Vec::with_capacity(target.args.len() + live_out.count()); + + // Block parameters from the jump target come first, then all remaining live-out + // locals. The interpreter zips locals[i] with values[i] to restore each binding. + let target_block = &self.body.basic_blocks[target.block]; + debug_assert_eq!(target_block.params.len(), target.args.len()); + + for (¶m, arg) in target_block.params.iter().zip(target.args) { + let value = self.compile_operand(db, span, arg); + + locals.push(Expression::Constant(query::Constant::U32(param.as_u32()))); + values.push(value); + } + + for local in live_out { + let value = self + .locals + .lookup(local) + .unwrap_or_else(|| unreachable!("use before def")) + .clone(); + + locals.push(Expression::Constant(query::Constant::U32(local.as_u32()))); + values.push(value); + } + + Continuation::IslandExit { + block: target.block, + locals, + values, + } + .into() + } + + fn compile_blocks( + &mut self, + db: &mut DatabaseContext<'heap, A>, + stack: &mut Vec, S>, + results: &mut Vec, + start: BasicBlockId, + island: &IslandNode, + ) { + let mut block_id = start; + + // Follow GOTOs directly instead of going through the stack, skipping superfluous + // snapshot and rollback frames. 
+ loop { + let block = &self.body.basic_blocks[block_id]; + + for statement in &block.statements { + self.compile_statement(db, statement); + } + + let terminator_span = block.terminator.span; + + match &block.terminator.kind { + TerminatorKind::Goto(Goto { target }) => { + if !island.contains(target.block) { + let exit = self.compile_island_exit(db, terminator_span, block_id, target); + results.push(exit); + + break; + } + + self.assign_params(db, terminator_span, target); + block_id = target.block; + } + TerminatorKind::SwitchInt(SwitchInt { + discriminant, + targets, + }) => { + let discriminant = self.compile_operand(db, terminator_span, discriminant); + + stack.push(Frame::FinishSwitchInt { + discriminant: Box::new(discriminant), + targets, + }); + + // Targets are pushed in reverse so that the first target is on top + // and gets processed first. This ensures results land on the result + // stack in the same order as `targets.values()`. + if let Some(otherwise) = targets.otherwise() { + stack.push(Frame::Enter { + from: block_id, + to: otherwise, + }); + } + + for (_, target) in targets.iter().rev() { + stack.push(Frame::Enter { + from: block_id, + to: target, + }); + } + + break; + } + TerminatorKind::Return(Return { value }) => { + let filter = self.compile_operand(db, terminator_span, value); + results.push(Continuation::Return { filter }.into()); + + break; + } + TerminatorKind::GraphRead(_) => { + self.diagnostics + .push(graph_read_terminator(block.terminator.span)); + results.push(Continuation::Null.into()); + break; + } + TerminatorKind::Unreachable => unreachable!(), + } + } + } + + pub(crate) fn compile_body( + &mut self, + db: &mut DatabaseContext<'heap, A>, + island: &IslandNode, + ) -> Expression + where + S: Clone, + { + debug_assert_eq!(island.target(), TargetId::Postgres); + + let mut stack = Vec::new_in(self.scratch.clone()); + stack.push(Frame::Compile(self.find_entry_block(island))); + + let mut results = Vec::new_in(self.scratch.clone()); 
+ + while let Some(frame) = stack.pop() { + match frame { + Frame::Compile(start) => { + self.compile_blocks(db, &mut stack, &mut results, start, island); + } + + Frame::Enter { from, to: target } => { + let span = self.body.basic_blocks[from].terminator.span; + + // Target may be outside the island (e.g. an otherwise-branch + // of a SwitchInt that jumps back to the interpreter). + if !island.contains(target.block) { + let exit = self.compile_island_exit(db, span, from, &target); + + results.push(exit); + continue; + } + + let snapshot = self.locals.snapshot(); + + self.assign_params(db, span, &target); + + // Rollback is pushed first so it runs after the block completes + // (LIFO), restoring locals for the next sibling branch. + stack.push(Frame::Rollback(snapshot)); + stack.push(Frame::Compile(target.block)); + } + + Frame::Rollback(snapshot) => { + self.locals.rollback_to(snapshot); + } + + Frame::FinishSwitchInt { + discriminant, + targets, + } => { + finish_switch_int(&mut results, *discriminant, targets); + } + } + } + + let result = results.pop().expect("no result produced"); + debug_assert!(results.is_empty()); + + result + } +} diff --git a/libs/@local/hashql/eval/src/postgres/filter/tests.rs b/libs/@local/hashql/eval/src/postgres/filter/tests.rs new file mode 100644 index 00000000000..3d0f1ffad01 --- /dev/null +++ b/libs/@local/hashql/eval/src/postgres/filter/tests.rs @@ -0,0 +1,1007 @@ +//! MIR builder snapshot tests for the Postgres filter compiler. +//! +//! These tests construct MIR bodies programmatically via the `body!` macro, run the full +//! execution analysis pipeline to compute island boundaries, then compile the Postgres islands +//! to SQL. This exercises MIR constructs that exist in the compiler but cannot yet be produced +//! from J-Expr — either because the HIR specialization phase doesn't support the intrinsic +//! (e.g. arithmetic, unary NOT) or because the type system can't resolve the access yet +//! (e.g. 
property field subscripts on generic entity types). +#![expect(clippy::min_ident_chars)] + +use alloc::alloc::Global; +use std::path::PathBuf; + +use hash_graph_postgres_store::store::postgres::query::{Expression, Transpile as _}; +use hashql_core::{ + heap::{Heap, Scratch}, + id::Id as _, + symbol::sym, + r#type::{TypeBuilder, TypeId, environment::Environment}, +}; +use hashql_diagnostics::DiagnosticIssues; +use hashql_hir::node::operation::InputOp; +use hashql_mir::{ + body::{Body, Source, basic_block::BasicBlockId, local::Local, terminator::GraphReadBody}, + builder::{BodyBuilder, body}, + context::MirContext, + def::{DefId, DefIdVec}, + intern::Interner, + pass::{ + GlobalAnalysisPass as _, + analysis::SizeEstimationAnalysis, + execution::{ExecutionAnalysis, ExecutionAnalysisResidual, IslandKind, TargetId}, + }, +}; +use insta::{Settings, assert_snapshot}; + +use crate::{ + context::EvalContext, + postgres::{DatabaseContext, PostgresCompiler, filter::GraphReadFilterCompiler}, +}; + +/// Runs the full execution analysis pipeline on a single `body!`-constructed filter body +/// and returns everything needed for compilation. 
+struct Fixture<'heap> { + env: Environment<'heap>, + bodies: DefIdVec, &'heap Heap>, + execution: DefIdVec>, &'heap Heap>, +} + +impl<'heap> Fixture<'heap> { + fn new(heap: &'heap Heap, env: Environment<'heap>, body: Body<'heap>) -> Self { + assert!( + matches!(body.source, Source::GraphReadFilter(_)), + "these tests require GraphReadFilter bodies", + ); + + let interner = Interner::new(heap); + let mut scratch = Scratch::new(); + + let mut bodies = DefIdVec::new_in(heap); + bodies.push(body); + + let mut mir_context = MirContext { + heap, + env: &env, + interner: &interner, + diagnostics: DiagnosticIssues::new(), + }; + + let mut size_analysis = SizeEstimationAnalysis::new_in(&scratch); + size_analysis.run(&mut mir_context, &bodies); + let footprints = size_analysis.finish(); + + let analysis = ExecutionAnalysis { + footprints: &footprints, + scratch: &mut scratch, + }; + let execution = analysis.run_all_in(&mut mir_context, &mut bodies, heap); + + assert!( + mir_context.diagnostics.is_empty(), + "execution analysis produced diagnostics: this likely means the body is malformed", + ); + + Self { + env, + bodies, + execution, + } + } + + fn def(&self) -> DefId { + self.bodies.iter().next().expect("fixture has one body").id + } +} + +struct FilterIslandReport { + entry_block: BasicBlockId, + target: TargetId, + sql: String, +} + +struct FilterReport { + islands: Vec, +} + +impl core::fmt::Display for FilterReport { + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + for (i, island) in self.islands.iter().enumerate() { + if i > 0 { + writeln!(f)?; + } + let label = format!( + " Island (entry: bb{}, target: {}) ", + island.entry_block.as_u32(), + island.target, + ); + writeln!(f, "{label:=^80}\n")?; + write!(f, "{}", island.sql)?; + } + Ok(()) + } +} + +fn compile_filter_islands<'heap>(fixture: &Fixture<'heap>, heap: &'heap Heap) -> FilterReport { + let mut scratch = Scratch::new(); + let def = fixture.def(); + + let context = 
EvalContext::new_in( + &fixture.env, + &fixture.bodies, + &fixture.execution, + heap, + &mut scratch, + ); + + let body = &fixture.bodies[def]; + let residual = fixture.execution[def] + .as_ref() + .expect("residual should exist"); + + // Collect Postgres exec islands sorted by entry block for stable output. + let mut postgres_islands: Vec<_> = residual + .islands + .find(TargetId::Postgres) + .filter(|(_, node)| matches!(node.kind(), IslandKind::Exec(_))) + .map(|(island_id, node)| { + let entry = find_entry_block(body, node); + (island_id, entry) + }) + .collect(); + postgres_islands.sort_by_key(|&(_, entry)| entry.as_u32()); + + let mut island_reports = Vec::new(); + + for (island_id, entry_block) in postgres_islands { + let island = &residual.islands[island_id]; + + let mut db = DatabaseContext::new_in(heap); + let mut compiler = GraphReadFilterCompiler::new(&context, body, Global); + + let expression = compiler.compile_body(&mut db, island); + let diagnostics = compiler.into_diagnostics(); + + assert!( + diagnostics.is_empty(), + "unexpected diagnostics from filter compilation", + ); + + let sql = expression.transpile_to_string(); + + island_reports.push(FilterIslandReport { + entry_block, + target: island.target(), + sql, + }); + } + + FilterReport { + islands: island_reports, + } +} + +fn find_entry_block( + body: &Body<'_>, + island: &hashql_mir::pass::execution::IslandNode, +) -> BasicBlockId { + use hashql_core::graph::Predecessors as _; + + for block in island.members() { + if body + .basic_blocks + .predecessors(block) + .all(|pred| !island.contains(pred)) + { + return block; + } + } + BasicBlockId::START +} +struct QueryReport { + sql: String, + parameters: String, +} + +impl core::fmt::Display for QueryReport { + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + writeln!(f, "{:=^80}\n", " SQL ")?; + write!(f, "{}", self.sql)?; + + if !self.parameters.is_empty() { + writeln!(f, "\n{:=^80}\n", " Parameters ")?; + write!(f, "{}", 
self.parameters)?; + } + Ok(()) + } +} + +fn compile_full_query<'heap>(fixture: &Fixture<'heap>, heap: &'heap Heap) -> QueryReport { + compile_full_query_with_mask(fixture, heap, None) +} + +fn compile_full_query_with_mask<'heap>( + fixture: &Fixture<'heap>, + heap: &'heap Heap, + property_mask: Option, +) -> QueryReport { + let mut scratch = Scratch::new(); + let def = fixture.def(); + + let mut context = EvalContext::new_in( + &fixture.env, + &fixture.bodies, + &fixture.execution, + heap, + &mut scratch, + ); + + let mut filters = hashql_core::heap::Vec::new_in(heap); + filters.push(GraphReadBody::Filter(def, Local::ENV)); + + let read = hashql_mir::body::terminator::GraphRead { + head: hashql_mir::body::terminator::GraphReadHead::Entity { + axis: hashql_mir::body::operand::Operand::Place(hashql_mir::body::place::Place::local( + Local::ENV, + )), + }, + body: filters, + tail: hashql_mir::body::terminator::GraphReadTail::Collect, + target: BasicBlockId::START, + }; + + let prepared_query = { + let mut compiler = + PostgresCompiler::new_in(&mut context, &mut scratch).with_property_mask(property_mask); + compiler.compile(&read) + }; + + assert!( + context.diagnostics.is_empty(), + "unexpected diagnostics from full compilation", + ); + + let sql = prepared_query.transpile().to_string(); + let parameters = format!("{}", prepared_query.parameters); + + QueryReport { sql, parameters } +} + +fn snapshot_settings() -> Settings { + let dir = PathBuf::from(env!("CARGO_MANIFEST_DIR")); + let mut settings = Settings::clone_current(); + settings.set_snapshot_path(dir.join("tests/ui/postgres/filter")); + settings.set_prepend_module_to_snapshot(false); + settings +} + +/// Diamond CFG entirely within one Postgres island. Both branches converge at bb3. +/// Verifies locals snapshot/rollback across the diamond and CASE generation. 
#[test]
fn diamond_cfg_merge() {
    let heap = Heap::new();
    let interner = Interner::new(&heap);
    let env = Environment::new(&heap);

    // bb0 branches on an input flag; bb1/bb2 both jump to bb3, passing the
    // branch result as a block parameter (the merge point of the diamond).
    let body = body!(interner, env; [graph::read::filter]@0/2 -> Bool {
        decl env: (), vertex: [Opaque sym::path::Entity; ?],
             cond: Bool, result: Bool;

        bb0() {
            cond = input.load! "flag";
            if cond then bb1() else bb2();
        },
        bb1() {
            goto bb3(true);
        },
        bb2() {
            goto bb3(false);
        },
        bb3(result) {
            return result;
        }
    });

    let fixture = Fixture::new(&heap, env, body);
    let report = compile_filter_islands(&fixture, &heap);

    let settings = snapshot_settings();
    let _guard = settings.bind_to_scope();
    assert_snapshot!("diamond_cfg_merge", report.to_string());
}

/// Multi-way switch with 4 value targets + otherwise.
#[test]
fn switch_int_many_branches() {
    let heap = Heap::new();
    let interner = Interner::new(&heap);
    let env = Environment::new(&heap);

    // Each switch arm returns a constant; the compiler presumably folds this
    // into a single CASE tree over `selector` — confirm against the snapshot.
    let body = body!(interner, env; [graph::read::filter]@0/2 -> Bool {
        decl env: (), vertex: [Opaque sym::path::Entity; ?],
             selector: Int, result: Bool;

        bb0() {
            selector = input.load! "sel";
            switch selector [0 => bb1(), 1 => bb2(), 2 => bb3(), 3 => bb4(), _ => bb5()];
        },
        bb1() { return true; },
        bb2() { return false; },
        bb3() { return true; },
        bb4() { return false; },
        bb5() { return true; }
    });

    let fixture = Fixture::new(&heap, env, body);
    let report = compile_filter_islands(&fixture, &heap);

    let settings = snapshot_settings();
    let _guard = settings.bind_to_scope();
    assert_snapshot!("switch_int_many_branches", report.to_string());
}

/// Linear goto chain: bb0 → bb1 → bb2 → return, all within Postgres island,
/// with block parameters passed at each goto.
+#[test] +fn straight_line_goto_chain() { + let heap = Heap::new(); + let interner = Interner::new(&heap); + let env = Environment::new(&heap); + + let body = body!(interner, env; [graph::read::filter]@0/2 -> Bool { + decl env: (), vertex: [Opaque sym::path::Entity; ?], + a: Bool, b: Bool, result: Bool; + + bb0() { + a = input.load! "val"; + goto bb1(a); + }, + bb1(b) { + goto bb2(b); + }, + bb2(result) { + return result; + } + }); + + let fixture = Fixture::new(&heap, env, body); + let report = compile_filter_islands(&fixture, &heap); + + let settings = snapshot_settings(); + let _guard = settings.bind_to_scope(); + assert_snapshot!("straight_line_goto_chain", report.to_string()); +} + +/// Goto crossing an island boundary. Entity path loads in bb0 make Interpreter placement +/// expensive enough that the solver prefers Postgres + paying the P→I switch cost. The +/// apply in bb1 forces Interpreter, creating the island exit. +#[test] +fn island_exit_goto() { + let heap = Heap::new(); + let interner = Interner::new(&heap); + let env = Environment::new(&heap); + + let callee_id = DefId::new(99); + + let body = body!(interner, env; [graph::read::filter]@0/2 -> ? 
{ + decl env: (), vertex: [Opaque sym::path::Entity; ?], + rec: ?, temporal: ?, eid: ?, props: ?, archived: ?, + func: [fn() -> ?], result: ?; + @proj v_meta = vertex.metadata: ?, + v_rec = v_meta.record_id: ?, + v_eid = v_rec.entity_id: ?, + v_temporal = v_meta.temporal_versioning: ?, + v_archived = v_meta.archived: ?, + v_props = vertex.properties: ?; + + bb0() { + rec = load v_rec; + temporal = load v_temporal; + eid = load v_eid; + props = load v_props; + archived = load v_archived; + goto bb1(rec); + }, + bb1(result) { + func = load callee_id; + result = apply func; + return result; + } + }); + + let fixture = Fixture::new(&heap, env, body); + let report = compile_filter_islands(&fixture, &heap); + + let settings = snapshot_settings(); + let _guard = settings.bind_to_scope(); + assert_snapshot!("island_exit_goto", report.to_string()); +} + +/// Island exit captures both block parameters AND remaining live-out locals. +/// bb0 defines `uuid` (entity path → Postgres) and `extra` (input, live-out); +/// bb1 receives `uuid` as a block param and has an apply to force Interpreter. +#[test] +fn island_exit_with_live_out() { + let heap = Heap::new(); + let interner = Interner::new(&heap); + let env = Environment::new(&heap); + + let callee_id = DefId::new(99); + + let body = body!(interner, env; [graph::read::filter]@0/2 -> ? { + decl env: (), vertex: [Opaque sym::path::Entity; ?], + rec: ?, temporal: ?, props: ?, extra: ?, + func: [fn() -> ?], result: ?; + @proj v_meta = vertex.metadata: ?, + v_rec = v_meta.record_id: ?, + v_temporal = v_meta.temporal_versioning: ?, + v_props = vertex.properties: ?; + + bb0() { + rec = load v_rec; + temporal = load v_temporal; + props = load v_props; + extra = input.load! 
"b"; + goto bb1(rec); + }, + bb1(result) { + func = load callee_id; + result = apply func; + return result; + } + }); + + let fixture = Fixture::new(&heap, env, body); + let report = compile_filter_islands(&fixture, &heap); + + let settings = snapshot_settings(); + let _guard = settings.bind_to_scope(); + assert_snapshot!("island_exit_with_live_out", report.to_string()); +} + +/// `SwitchInt` where one branch returns (stays in Postgres) and the other exits to the +/// interpreter — mixed continuation types within a single CASE tree. +#[test] +fn island_exit_switch_int() { + let heap = Heap::new(); + let interner = Interner::new(&heap); + let env = Environment::new(&heap); + + let callee_id = DefId::new(99); + + // Entity path loads anchor bb0 to Postgres. bb1 returns (stays in Postgres); + // bb2 does a closure apply (forces Interpreter). + let body = body!(interner, env; [graph::read::filter]@0/2 -> ? { + decl env: (), vertex: [Opaque sym::path::Entity; ?], + rec: ?, temporal: ?, props: ?, cond: Bool, + func: [fn() -> ?], result: ?; + @proj v_meta = vertex.metadata: ?, + v_rec = v_meta.record_id: ?, + v_temporal = v_meta.temporal_versioning: ?, + v_props = vertex.properties: ?; + + bb0() { + rec = load v_rec; + temporal = load v_temporal; + props = load v_props; + cond = input.load! "flag"; + if cond then bb1() else bb2(); + }, + bb1() { + result = load true; + return result; + }, + bb2() { + func = load callee_id; + result = apply func; + return result; + } + }); + + let fixture = Fixture::new(&heap, env, body); + let report = compile_filter_islands(&fixture, &heap); + + let settings = snapshot_settings(); + let _guard = settings.bind_to_scope(); + assert_snapshot!("island_exit_switch_int", report.to_string()); +} + +/// Goto crosses island boundary with no target arguments AND no live-out locals. +/// Tests the edge case of empty ARRAY[] literals with type casts. 
+#[test] +fn island_exit_empty_arrays() { + let heap = Heap::new(); + let interner = Interner::new(&heap); + let env = Environment::new(&heap); + + let callee_id = DefId::new(99); + + // Entity path loads anchor bb0 to Postgres, but none of its locals are used by bb1. + // bb1 starts fresh with its own closure apply (Interpreter). + let body = body!(interner, env; [graph::read::filter]@0/2 -> ? { + decl env: (), vertex: [Opaque sym::path::Entity; ?], + rec: ?, temporal: ?, props: ?, + func: [fn() -> ?], result: ?; + @proj v_meta = vertex.metadata: ?, + v_rec = v_meta.record_id: ?, + v_temporal = v_meta.temporal_versioning: ?, + v_props = vertex.properties: ?; + + bb0() { + rec = load v_rec; + temporal = load v_temporal; + props = load v_props; + goto bb1(); + }, + bb1() { + func = load callee_id; + result = apply func; + return result; + } + }); + + let fixture = Fixture::new(&heap, env, body); + let report = compile_filter_islands(&fixture, &heap); + + let settings = snapshot_settings(); + let _guard = settings.bind_to_scope(); + assert_snapshot!("island_exit_empty_arrays", report.to_string()); +} + +/// When the solver creates only a Postgres Data island (no exec island), the data island +/// contributes entity columns to the SELECT list but does NOT produce a continuation LATERAL. +#[test] +fn data_island_provides_without_lateral() { + let heap = Heap::new(); + let interner = Interner::new(&heap); + let env = Environment::new(&heap); + + let callee_id = DefId::new(99); + + // Light entity path accesses — solver puts everything on Interpreter, creating only a + // Postgres Data island for the entity columns. No Postgres exec island exists. + let body = body!(interner, env; [graph::read::filter]@0/2 -> ? 
{ + decl env: (), vertex: [Opaque sym::path::Entity; ?], + uuid: ?, func: [fn() -> ?], result: ?; + @proj v_uuid = vertex.entity_uuid: ?; + + bb0() { + uuid = load v_uuid; + func = load callee_id; + result = apply func; + return result; + } + }); + + let fixture = Fixture::new(&heap, env, body); + + // No Postgres exec islands should exist — only a Data island. + let filter_report = compile_filter_islands(&fixture, &heap); + assert!( + filter_report.islands.is_empty(), + "expected no Postgres exec islands, but found {}", + filter_report.islands.len(), + ); + + // The full query should still include entity columns from the Data island's provides. + let query_report = compile_full_query(&fixture, &heap); + + let settings = snapshot_settings(); + let _guard = settings.bind_to_scope(); + assert_snapshot!( + "data_island_provides_without_lateral", + query_report.to_string() + ); +} + +/// A Postgres island that provides traversal paths to a downstream interpreter island. +/// The SELECT list should include provided paths with correct joins, and the continuation +/// LATERAL should appear. +#[test] +fn provides_drives_select_and_joins() { + let heap = Heap::new(); + let interner = Interner::new(&heap); + let env = Environment::new(&heap); + + let callee_id = DefId::new(99); + + // bb0 accesses entity paths (Postgres-origin), then bb1 uses a closure (Interpreter). + // The Postgres island should provide the accessed paths to the Interpreter island. + let body = body!(interner, env; [graph::read::filter]@0/2 -> ? 
{ + decl env: (), vertex: [Opaque sym::path::Entity; ?], + uuid: ?, archived: ?, func: [fn() -> ?], result: ?; + @proj v_uuid = vertex.entity_uuid: ?, + v_metadata = vertex.metadata: ?, + v_archived = v_metadata.archived: ?; + + bb0() { + uuid = load v_uuid; + archived = load v_archived; + goto bb1(); + }, + bb1() { + func = load callee_id; + result = apply func; + return result; + } + }); + + let fixture = Fixture::new(&heap, env, body); + let report = compile_full_query(&fixture, &heap); + + let settings = snapshot_settings(); + let _guard = settings.bind_to_scope(); + assert_snapshot!("provides_drives_select_and_joins", report.to_string()); +} + +/// Property field access: `vertex.properties.` → `json_extract_path(properties, +/// $key::text)`. Triggers `entity_editions` JOIN for the properties column. +#[test] +fn property_field_equality() { + let heap = Heap::new(); + let interner = Interner::new(&heap); + let env = Environment::new(&heap); + + let body = body!(interner, env; [graph::read::filter]@0/2 -> Bool { + decl env: (), vertex: [Opaque sym::path::Entity; ?], + field_val: ?, input_val: ?, result: Bool; + @proj v_props = vertex.properties: ?, + v_name = v_props.name: ?; + + bb0() { + field_val = load v_name; + input_val = input.load! "expected"; + result = bin.== field_val input_val; + return result; + } + }); + + let fixture = Fixture::new(&heap, env, body); + let report = compile_filter_islands(&fixture, &heap); + + let settings = snapshot_settings(); + let _guard = settings.bind_to_scope(); + assert_snapshot!("property_field_equality", report.to_string()); +} + +/// Nested property access: `vertex.properties..` → +/// `json_extract_path(properties, $key1::text, $key2::text)`. 
#[test]
fn nested_property_access() {
    let heap = Heap::new();
    let interner = Interner::new(&heap);
    let env = Environment::new(&heap);

    // Two chained `@proj` steps under `properties` — the compiler should emit a
    // single json_extract_path with both keys rather than nested extractions.
    let body = body!(interner, env; [graph::read::filter]@0/2 -> Bool {
        decl env: (), vertex: [Opaque sym::path::Entity; ?],
             val: ?, input_val: ?, result: Bool;
        @proj v_props = vertex.properties: ?,
              v_address = v_props.address: ?,
              v_city = v_address.city: ?;

        bb0() {
            val = load v_city;
            input_val = input.load! "expected_city";
            result = bin.== val input_val;
            return result;
        }
    });

    let fixture = Fixture::new(&heap, env, body);
    let report = compile_filter_islands(&fixture, &heap);

    let settings = snapshot_settings();
    let _guard = settings.bind_to_scope();
    assert_snapshot!("nested_property_access", report.to_string());
}

/// Link data field access: `vertex.link_data.left_entity_id.entity_uuid` →
/// LEFT OUTER JOIN on `entity_has_left_entity`, correct column reference.
#[test]
fn left_entity_filter() {
    let heap = Heap::new();
    let interner = Interner::new(&heap);
    let env = Environment::new(&heap);

    let body = body!(interner, env; [graph::read::filter]@0/2 -> Bool {
        decl env: (), vertex: [Opaque sym::path::Entity; ?],
             left_uuid: ?, input_id: ?, result: Bool;
        @proj v_link = vertex.link_data: ?,
              v_left = v_link.left_entity_id: ?,
              v_left_uuid = v_left.entity_uuid: ?;

        bb0() {
            left_uuid = load v_left_uuid;
            input_id = input.load! "id";
            result = bin.== left_uuid input_id;
            return result;
        }
    });

    let fixture = Fixture::new(&heap, env, body);
    let report = compile_filter_islands(&fixture, &heap);

    let settings = snapshot_settings();
    let _guard = settings.bind_to_scope();
    assert_snapshot!("left_entity_filter", report.to_string());
}

/// Property mask wraps `properties` and `property_metadata` SELECT expressions with
/// `(col - mask)` but leaves other columns untouched.
+#[test] +fn property_mask() { + let heap = Heap::new(); + let interner = Interner::new(&heap); + let env = Environment::new(&heap); + + let callee_id = DefId::new(99); + + // Properties access in bb0 (Postgres Data island) with an apply in bb1 (Interpreter) + // ensures Properties and `PropertyMetadata` appear in the provides set. + let body = body!(interner, env; [graph::read::filter]@0/2 -> ? { + decl env: (), vertex: [Opaque sym::path::Entity; ?], + props: ?, prop_meta: ?, func: [fn() -> ?], result: ?; + @proj v_props = vertex.properties: ?, + v_meta = vertex.metadata: ?, + v_prop_meta = v_meta.property_metadata: ?; + + bb0() { + props = load v_props; + prop_meta = load v_prop_meta; + func = load callee_id; + result = apply func; + return result; + } + }); + + let fixture = Fixture::new(&heap, env, body); + + // Use a parameter placeholder as the mask expression. + let mask = Expression::Parameter(99); + + let report = compile_full_query_with_mask(&fixture, &heap, Some(mask)); + + let settings = snapshot_settings(); + let _guard = settings.bind_to_scope(); + assert_snapshot!("property_mask", report.to_string()); +} + +/// Tuple aggregate followed by `.0` numeric field projection → +/// `json_extract_path(base, (0)::text)`. 
#[test]
fn field_index_projection() {
    let heap = Heap::new();
    let interner = Interner::new(&heap);
    let env = Environment::new(&heap);

    // Numeric field projection `.0` on a locally-built tuple aggregate; the key
    // is a constant index rather than a symbol.
    let body = body!(interner, env; [graph::read::filter]@0/2 -> Int {
        decl env: (), vertex: [Opaque sym::path::Entity; ?],
             tup: (Int, Int), result: Int;
        @proj first = tup.0: Int;

        bb0() {
            tup = tuple 10, 20;
            result = load first;
            return result;
        }
    });

    let fixture = Fixture::new(&heap, env, body);
    let report = compile_filter_islands(&fixture, &heap);

    let settings = snapshot_settings();
    let _guard = settings.bind_to_scope();
    assert_snapshot!("field_index_projection", report.to_string());
}

/// Struct field access using `ProjectionKind::FieldByName(symbol)` →
/// `json_extract_path(base, ($symbol)::text)`.
#[test]
fn field_by_name_projection() {
    let heap = Heap::new();
    let interner = Interner::new(&heap);
    let env = Environment::new(&heap);

    let body = body!(interner, env; [graph::read::filter]@0/2 -> Int {
        decl env: (), vertex: [Opaque sym::path::Entity; ?],
             s: (x: Int, y: Int), result: Int;
        @proj x_field = s.x: Int;

        bb0() {
            s = struct x: 10, y: 20;
            result = load x_field;
            return result;
        }
    });

    let fixture = Fixture::new(&heap, env, body);
    let report = compile_filter_islands(&fixture, &heap);

    let settings = snapshot_settings();
    let _guard = settings.bind_to_scope();
    assert_snapshot!("field_by_name_projection", report.to_string());
}

/// Dynamic index projection where the key comes from a local.
/// `ProjectionKind::Index(local)` → `json_extract_path(base, (local_expr)::text)`.
#[test]
fn dynamic_index_projection() {
    let heap = Heap::new();
    let interner = Interner::new(&heap);
    let env = Environment::new(&heap);

    // Built with the raw BodyBuilder API because `body!` has no syntax for an
    // Index projection keyed by another local — TODO confirm that's the reason.
    let mut builder = BodyBuilder::new(&interner);
    let types = TypeBuilder::synthetic(&env);

    let int_ty = types.integer();
    let unknown_ty = types.unknown();
    let entity_ty = types.opaque(sym::path::Entity, unknown_ty);
    let unit_ty = types.tuple([] as [TypeId; 0]);
    let list_ty = types.list(int_ty);

    // The first two locals mirror the filter body's fixed (env, vertex) signature.
    let _env_local = builder.local("env", unit_ty);
    let _vertex = builder.local("vertex", entity_ty);
    let list = builder.local("list", list_ty);
    let idx = builder.local("idx", int_ty);
    let result = builder.local("result", int_ty);

    // list[idx] — Index projection
    let list_at_idx = builder.place(|p| p.from(list).index(idx.local, int_ty));

    let bb0 = builder.reserve_block([]);

    builder
        .build_block(bb0)
        .assign_place(list, |rv| {
            let elems = [rv.const_int(10), rv.const_int(20), rv.const_int(30)];
            rv.list(elems)
        })
        .assign_place(idx, |rv| {
            rv.input(InputOp::Load { required: true }, "index")
        })
        .assign_place(result, |rv| rv.load(list_at_idx))
        .ret(result);

    // Mark the hand-built body as a graph-read filter so Fixture::new accepts it.
    let mut body = builder.finish(2, int_ty);
    body.source = Source::GraphReadFilter(hashql_hir::node::HirId::PLACEHOLDER);
    body.id = DefId::new(0);

    let fixture = Fixture::new(&heap, env, body);
    let report = compile_filter_islands(&fixture, &heap);

    let settings = snapshot_settings();
    let _guard = settings.bind_to_scope();
    assert_snapshot!("dynamic_index_projection", report.to_string());
}

/// `UnOp::Neg` → `UnaryOperator::Negate` in SQL.
#[test]
fn unary_neg() {
    let heap = Heap::new();
    let interner = Interner::new(&heap);
    let env = Environment::new(&heap);

    let body = body!(interner, env; [graph::read::filter]@0/2 -> Int {
        decl env: (), vertex: [Opaque sym::path::Entity; ?],
             x: Int, result: Int;

        bb0() {
            x = input.load! "val";
            result = un.neg x;
            return result;
        }
    });

    let fixture = Fixture::new(&heap, env, body);
    let report = compile_filter_islands(&fixture, &heap);

    let settings = snapshot_settings();
    let _guard = settings.bind_to_scope();
    assert_snapshot!("unary_neg", report.to_string());
}

/// `UnOp::Not` → `UnaryOperator::Not` in SQL.
#[test]
fn unary_not() {
    let heap = Heap::new();
    let interner = Interner::new(&heap);
    let env = Environment::new(&heap);

    let body = body!(interner, env; [graph::read::filter]@0/2 -> Bool {
        decl env: (), vertex: [Opaque sym::path::Entity; ?],
             x: Bool, result: Bool;

        bb0() {
            x = input.load! "val";
            result = un.! x;
            return result;
        }
    });

    let fixture = Fixture::new(&heap, env, body);
    let report = compile_filter_islands(&fixture, &heap);

    let settings = snapshot_settings();
    let _guard = settings.bind_to_scope();
    assert_snapshot!("unary_not", report.to_string());
}

/// `BinOp::Sub` → `BinaryOperator::Subtract` with `::numeric` casts on both operands.
#[test]
fn binary_sub_numeric_cast() {
    let heap = Heap::new();
    let interner = Interner::new(&heap);
    let env = Environment::new(&heap);

    let body = body!(interner, env; [graph::read::filter]@0/2 -> Int {
        decl env: (), vertex: [Opaque sym::path::Entity; ?],
             x: Int, y: Int, result: Int;

        bb0() {
            x = input.load! "a";
            y = input.load! "b";
            result = bin.- x y;
            return result;
        }
    });

    let fixture = Fixture::new(&heap, env, body);
    let report = compile_filter_islands(&fixture, &heap);

    let settings = snapshot_settings();
    let _guard = settings.bind_to_scope();
    assert_snapshot!("binary_sub_numeric_cast", report.to_string());
}

/// `UnOp::BitNot` → `UnaryOperator::BitwiseNot` in SQL.
+#[test] +fn unary_bitnot() { + let heap = Heap::new(); + let interner = Interner::new(&heap); + let env = Environment::new(&heap); + + let body = body!(interner, env; [graph::read::filter]@0/2 -> Int { + decl env: (), vertex: [Opaque sym::path::Entity; ?], + x: Int, result: Int; + + bb0() { + x = input.load! "val"; + result = un.~ x; + return result; + } + }); + + let fixture = Fixture::new(&heap, env, body); + let report = compile_filter_islands(&fixture, &heap); + + let settings = snapshot_settings(); + let _guard = settings.bind_to_scope(); + assert_snapshot!("unary_bitnot", report.to_string()); +} + +/// `BinOp::BitAnd` → `BinaryOperator::BitwiseAnd` with `::bigint` casts on both operands. +#[test] +fn binary_bitand_bigint_cast() { + let heap = Heap::new(); + let interner = Interner::new(&heap); + let env = Environment::new(&heap); + + let body = body!(interner, env; [graph::read::filter]@0/2 -> Int { + decl env: (), vertex: [Opaque sym::path::Entity; ?], + x: Int, y: Int, result: Int; + + bb0() { + x = input.load! "a"; + y = input.load! "b"; + result = bin.& x y; + return result; + } + }); + + let fixture = Fixture::new(&heap, env, body); + let report = compile_filter_islands(&fixture, &heap); + + let settings = snapshot_settings(); + let _guard = settings.bind_to_scope(); + assert_snapshot!("binary_bitand_bigint_cast", report.to_string()); +} diff --git a/libs/@local/hashql/eval/src/postgres/mod.rs b/libs/@local/hashql/eval/src/postgres/mod.rs new file mode 100644 index 00000000000..717c1fa1368 --- /dev/null +++ b/libs/@local/hashql/eval/src/postgres/mod.rs @@ -0,0 +1,374 @@ +//! HashQL MIR → PostgreSQL `SELECT` compiler. +//! +//! This module compiles a [`GraphRead`] (a graph query with one or more filter bodies) into a +//! [`PreparedQuery`]: a [`SelectStatement`] plus a deduplicated parameter list ([`Parameters`]). +//! +//! ## Execution model: islands and continuations +//! +//! Filter bodies are compiled *island-by-island*. 
An *island* is a group of MIR basic blocks that +//! the execution placement pass assigned to the Postgres backend +//! ([`TargetId::Postgres`]). +//! +//! Each compiled island becomes a `CROSS JOIN LATERAL` subquery that returns a single composite +//! `continuation` value. The continuation transports control-flow information back to the +//! interpreter: +//! +//! - **`filter`** (`bool`): tri-state: `NULL` passthrough, `TRUE` keep, `FALSE` reject. +//! - **`block`** (`int`): next basic block when leaving the island. +//! - **`locals`** (`int[]`) and **`values`** (`jsonb[]`): parallel arrays carrying live-out locals. +//! +//! Continuation subqueries are forced to materialise once per row using `OFFSET 0` to prevent +//! PostgreSQL from inlining the subquery and duplicating the island's `CASE` tree per field access. +//! +//! ## Parameters and projections +//! +//! Parameters are deduplicated by identity and referenced by index (rendered as `$N` in SQL). +//! Table joins are *lazy*: the compiler only requests joins when an [`EntityPath`] is actually +//! referenced by filters or required outputs (the "provides" set). +//! +//! [`GraphRead`]: hashql_mir::body::terminator::GraphRead +//! [`TargetId::Postgres`]: hashql_mir::pass::execution::TargetId::Postgres +//! 
[`EntityPath`]: hashql_mir::pass::execution::traversal::EntityPath + +use core::{alloc::Allocator, fmt::Display}; + +use hash_graph_postgres_store::store::postgres::query::{ + self, Column, Expression, Identifier, SelectExpression, SelectStatement, Transpile as _, + WhereExpression, table::EntityTemporalMetadata, +}; +use hashql_core::{heap::BumpAllocator, id::Id as _}; +use hashql_mir::{ + body::{ + Body, + terminator::{GraphRead, GraphReadBody}, + }, + def::DefId, + pass::{ + analysis::dataflow::lattice::HasBottom as _, + execution::{ + IslandKind, IslandNode, TargetId, VertexType, + traversal::{EntityPath, TraversalMapLattice, TraversalPath, TraversalPathBitMap}, + }, + }, +}; + +pub use self::parameters::{ParameterIndex, Parameters, TemporalAxis}; +use self::{ + continuation::ContinuationColumn, filter::GraphReadFilterCompiler, projections::Projections, +}; +use crate::context::EvalContext; + +mod continuation; +pub(crate) mod error; +mod filter; +mod parameters; +mod projections; +mod traverse; + +/// Mutable compilation state accumulated while building a single SQL query. +/// +/// Collects deduplicated [`Parameters`], requested [`Projections`] (lazy joins driven by +/// [`EntityPath`] usage), the top-level [`WhereExpression`] (temporal constraints and continuation +/// filters), and `CROSS JOIN LATERAL` items for island continuations. 
+/// +/// [`EntityPath`]: hashql_mir::pass::execution::traversal::EntityPath +pub(crate) struct DatabaseContext<'heap, A: Allocator> { + pub parameters: Parameters<'heap, A>, + pub projections: Projections, + pub where_expression: WhereExpression, + pub laterals: Vec, A>, + pub continuation_aliases: Vec, +} + +impl DatabaseContext<'_, A> { + pub(crate) fn new_in(alloc: A) -> Self + where + A: Clone, + { + Self { + parameters: Parameters::new_in(alloc.clone()), + projections: Projections::new(), + where_expression: WhereExpression::default(), + laterals: Vec::new_in(alloc.clone()), + continuation_aliases: Vec::new_in(alloc), + } + } + + /// Adds temporal overlap constraints to the top-level `WHERE` clause. + /// + /// Both axes are always expressed as `&&` (range overlap) so the `GiST` index on + /// `(web_id, entity_uuid, transaction_time, decision_time)` is usable regardless of which + /// axis is pinned. + /// + /// The interpreter is responsible for binding the [`TemporalAxis`] parameters correctly: + /// - **Pinned axis:** `[timestamp, timestamp]` (degenerate single-point range, equivalent to + /// `@>`). + /// - **Variable axis:** the actual query interval. + /// + /// This avoids a `CASE`-based approach which would hide the operators from the planner and + /// prevent index scans on generic plans. 
+ fn add_temporal_conditions(&mut self) { + let temporal_metadata = self.projections.temporal_metadata(); + + let tx_param = Expression::Parameter( + self.parameters + .temporal_axis(TemporalAxis::Transaction) + .as_usize(), + ); + let dt_param = Expression::Parameter( + self.parameters + .temporal_axis(TemporalAxis::Decision) + .as_usize(), + ); + + self.where_expression.add_condition(Expression::overlap( + Expression::ColumnReference(query::ColumnReference { + correlation: Some(temporal_metadata.clone()), + name: Column::EntityTemporalMetadata(EntityTemporalMetadata::TransactionTime) + .into(), + }), + tx_param, + )); + + self.where_expression.add_condition(Expression::overlap( + Expression::ColumnReference(query::ColumnReference { + correlation: Some(temporal_metadata), + name: Column::EntityTemporalMetadata(EntityTemporalMetadata::DecisionTime).into(), + }), + dt_param, + )); + } +} + +/// A fully-compiled SQL query ready for execution. +/// +/// Contains the typed query AST ([`SelectStatement`]) and the parameter catalog ([`Parameters`]) +/// that the interpreter uses to bind runtime values in the correct order. +pub struct PreparedQuery<'heap, A: Allocator> { + pub parameters: Parameters<'heap, A>, + pub statement: SelectStatement, +} + +impl PreparedQuery<'_, A> { + pub fn transpile(&self) -> impl Display { + core::fmt::from_fn(|fmt| self.statement.transpile(fmt)) + } +} + +/// Compiles Postgres-targeted MIR islands into a single PostgreSQL `SELECT`. +/// +/// Created per evaluation and used to compile [`GraphRead`] terminators. Compilation emits +/// diagnostics into the shared [`EvalContext`] rather than returning `Result`, so multiple +/// errors can be reported from a single compilation pass. 
+/// +/// [`GraphRead`]: hashql_mir::body::terminator::GraphRead +pub struct PostgresCompiler<'eval, 'ctx, 'heap, A: Allocator, S: Allocator> { + context: &'eval mut EvalContext<'ctx, 'heap, A>, + + alloc: A, + scratch: S, + + /// Pre-built expression to subtract protected property keys from JSONB columns. + /// + /// When present, `properties` and `property_metadata` `SELECT` expressions are + /// wrapped as `(column - mask)`. The caller builds this from the permission + /// system's protection rules; the compiler doesn't know about entity types + /// or actors. + property_mask: Option, +} + +impl<'eval, 'ctx, 'heap, A: Allocator, S: BumpAllocator> + PostgresCompiler<'eval, 'ctx, 'heap, A, S> +{ + pub fn new_in(context: &'eval mut EvalContext<'ctx, 'heap, A>, scratch: S) -> Self + where + A: Clone, + { + let alloc = context.alloc.clone(); + + Self { + context, + alloc, + scratch, + property_mask: None, + } + } + + /// Sets an optional JSONB key mask applied to selected property columns. + /// + /// When set, `properties` and `property_metadata` selections are wrapped as + /// `(column - mask)` to strip protected keys from the output. The compiler itself does not + /// understand permissions; the caller is responsible for building the mask. + #[must_use] + pub fn with_property_mask(mut self, property_mask: Option) -> Self { + self.property_mask = property_mask; + self + } + + /// Returns `None` for data-only islands that produce no SQL. + fn compile_graph_read_filter_island( + &mut self, + db: &mut DatabaseContext<'heap, A>, + body: &Body<'heap>, + island: &IslandNode, + provides: &mut TraversalPathBitMap, + ) -> Option { + provides.insert(island.provides()); + + // Explicit match here, because it means that we'll get a compile-time error if a new + // variant is added. 
+ match island.kind() { + IslandKind::Exec(_) => {} + IslandKind::Data => return None, // nothing to do + } + + // TODO: we might want a longer lived graph read filter compiler here + let expression = self.scratch.scoped(|alloc| { + let mut compiler = GraphReadFilterCompiler::new(self.context, body, &alloc); + + let expression = compiler.compile_body(db, island); + let mut diagnostics = compiler.into_diagnostics(); + + self.context.diagnostics.append(&mut diagnostics); + expression + }); + + Some(expression) + } + + /// Emits a diagnostic if no island graph exists for the filter body. + fn compile_graph_read_filter( + &mut self, + db: &mut DatabaseContext<'heap, A>, + def: DefId, + provides: &mut TraversalPathBitMap, + ) { + let body = &self.context.bodies[def]; + + let Some(residual) = self.context.execution.lookup(body.id) else { + self.context + .diagnostics + .push(error::missing_island_graph(body.span)); + return; + }; + + let islands = residual.islands.find(TargetId::Postgres); + + for (island_id, island) in islands { + let Some(expression) = + self.compile_graph_read_filter_island(db, body, island, provides) + else { + continue; + }; + + let cont_alias = continuation::ContinuationAlias { + body: def, + island: island_id, + }; + let table_ref = cont_alias.table_ref(); + + // We explicitly set an OFFSET, as otherwise the postgres planner inlines the + // subquery and duplicates the CASE tree per field access, making it much more + // expensive to compute. 
+ let subquery = SelectStatement::builder() + .selects(vec![SelectExpression::Expression { + expression, + alias: Some(ContinuationColumn::Entry.identifier()), + }]) + .offset(0) + .build(); + + let subquery = query::FromItem::Subquery { + lateral: true, + statement: Box::new(subquery), + alias: Some(table_ref.clone()), + column_alias: Vec::new(), + }; + + db.laterals.push(subquery); + db.where_expression + .add_condition(continuation::filter_condition(&table_ref)); + db.continuation_aliases.push(cont_alias); + } + } + + /// Compiles a [`GraphRead`] into a [`PreparedQuery`]. + /// + /// [`GraphRead`]: hashql_mir::body::terminator::GraphRead + pub fn compile(&mut self, read: &'ctx GraphRead<'heap>) -> PreparedQuery<'heap, A> + where + A: Clone, + { + let mut db = DatabaseContext::new_in(self.alloc.clone()); + + // Temporal conditions go first - they're always present on the base table + // and don't depend on anything the filter body produces. + db.add_temporal_conditions(); + + let mut provides = TraversalMapLattice.bottom(); + + for body in &read.body { + match body { + &GraphReadBody::Filter(def_id, _) => { + self.compile_graph_read_filter(&mut db, def_id, &mut provides); + } + } + } + + // Build SELECT list from what the interpreter needs back. + // Each EntityPath in `provides` becomes a SELECT expression via eval_entity_path, + // which also registers the necessary projection joins in DatabaseContext. 
+ let mut select_expressions = vec![]; + + for traversal_path in provides[VertexType::Entity].iter() { + let TraversalPath::Entity(path) = traversal_path; + + let mut expression = traverse::eval_entity_path(&mut db, path); + + if matches!(path, EntityPath::Properties | EntityPath::PropertyMetadata) + && let Some(mask) = &self.property_mask + { + expression = Expression::grouped(Expression::subtract(expression, mask.clone())); + } + + let alias = Identifier::from(traversal_path.as_symbol().unwrap()); + + select_expressions.push(SelectExpression::Expression { + expression, + alias: Some(alias), + }); + } + + // Decompose each continuation LATERAL into individual columns so the + // interpreter receives flat typed values instead of Postgres composites. + // Filter is excluded; it's only used in the WHERE clause. + for &cont_alias in &db.continuation_aliases { + let table_ref = cont_alias.table_ref(); + + for field in [ + ContinuationColumn::Block, + ContinuationColumn::Locals, + ContinuationColumn::Values, + ] { + select_expressions.push(SelectExpression::Expression { + expression: continuation::field_access(&table_ref, field), + alias: Some(cont_alias.field_identifier(field)), + }); + } + } + + // Build FROM: base table + joins + CROSS JOIN LATERALs + let from = db.projections.build_from(&mut db.parameters, db.laterals); + + let query = SelectStatement::builder() + .selects(select_expressions) + .from(from) + .where_expression(db.where_expression) + .build(); + + PreparedQuery { + parameters: db.parameters, + statement: query, + } + } +} diff --git a/libs/@local/hashql/eval/src/postgres/parameters.rs b/libs/@local/hashql/eval/src/postgres/parameters.rs new file mode 100644 index 00000000000..11d4fbcd981 --- /dev/null +++ b/libs/@local/hashql/eval/src/postgres/parameters.rs @@ -0,0 +1,245 @@ +//! Deduplicated SQL parameter management. +//! +//! The compiler allocates parameters through typed constructors on [`Parameters`]. Each distinct +//! 
logical value (input binding, constant, temporal axis, etc.) is assigned a stable +//! [`ParameterIndex`], and identical requests yield the same index. The interpreter later uses +//! the reverse mapping to bind runtime values in the correct `$N` order. + +use alloc::alloc::Global; +use core::{alloc::Allocator, fmt}; + +use hash_graph_postgres_store::store::postgres::query::Expression; +use hashql_core::{ + collections::{FastHashMap, fast_hash_map_in}, + id::{self, Id as _, IdVec}, + symbol::Symbol, + value::Primitive, +}; +use hashql_mir::{body::place::FieldIndex, interpret::value::Int}; + +id::newtype!( + /// Index of a SQL parameter in the compiled query, rendered as `$N` by the SQL formatter. + pub struct ParameterIndex(u32 is 0..=u32::MAX) +); + +impl From for Expression { + fn from(value: ParameterIndex) -> Self { + Self::Parameter(value.as_usize()) + } +} + +/// Interned identity for a SQL parameter. +/// +/// Parameters are deduplicated by this key so multiple occurrences of the same logical value +/// (e.g. the same input symbol) share one `$N` placeholder. +#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +enum Parameter<'heap> { + /// A user-provided input binding. + Input(Symbol<'heap>), + /// An integer constant that does not fit in a `u32`. + Int(Int), + /// A primitive constant value (string, bool, etc.). + Primitive(Primitive<'heap>), + /// A symbol used as a JSON object key in SQL expressions. + Symbol(Symbol<'heap>), + /// A captured-environment field access. + Env(FieldIndex), + /// Temporal axis range provided by the interpreter at execution time. + /// + /// The interpreter binds these based on the user's temporal axes configuration: + /// pinned axis gets a degenerate `[ts, ts]` range, variable axis gets the query + /// interval. Both use `&&` so the `GiST` index is always usable. 
+ TemporalAxis(TemporalAxis), +} + +impl fmt::Display for Parameter<'_> { + fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + Self::Input(symbol) => write!(fmt, "Input({symbol})"), + Self::Int(int) => write!(fmt, "Int({int})"), + Self::Primitive(primitive) => write!(fmt, "Primitive({primitive})"), + Self::Symbol(symbol) => write!(fmt, "Symbol({symbol})"), + Self::Env(field) => write!(fmt, "Env(#{})", field.as_u32()), + Self::TemporalAxis(axis) => write!(fmt, "TemporalAxis({axis})"), + } + } +} + +/// Selects which temporal axis a parameter range applies to. +#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +pub enum TemporalAxis { + /// Transaction-time axis. + Transaction, + /// Decision-time axis. + Decision, +} + +impl fmt::Display for TemporalAxis { + fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + Self::Transaction => fmt.write_str("Transaction"), + Self::Decision => fmt.write_str("Decision"), + } + } +} + +/// Deduplicating parameter catalog for a compiled query. +/// +/// The compiler requests parameters through typed constructors. Each request returns a stable +/// [`ParameterIndex`], and identical requests yield the same index. +/// +/// The interpreter uses the reverse mapping to bind runtime values in the correct order. 
+pub struct Parameters<'heap, A: Allocator = Global> {
+    // FIX: restore stripped generic arguments — the map is keyed by the interned
+    // `Parameter<'heap>` identity, and `reverse` is the index-ordered inverse
+    // (pushed in `get_or_insert`, iterated by `Display`).
+    lookup: FastHashMap<Parameter<'heap>, ParameterIndex, A>,
+    reverse: IdVec<ParameterIndex, Parameter<'heap>, A>,
+}
+
+impl<'heap, A: Allocator> Parameters<'heap, A> {
+    pub(crate) fn new_in(alloc: A) -> Self
+    where
+        A: Clone,
+    {
+        Self {
+            lookup: fast_hash_map_in(alloc.clone()),
+            reverse: IdVec::new_in(alloc),
+        }
+    }
+
+    /// Returns the existing index for `param`, or allocates the next sequential one.
+    fn get_or_insert(&mut self, param: Parameter<'heap>) -> ParameterIndex {
+        *self
+            .lookup
+            .entry(param)
+            .or_insert_with(|| self.reverse.push(param))
+    }
+
+    /// Allocates (or reuses) a parameter for a user-provided input binding.
+    pub(crate) fn input(&mut self, name: Symbol<'heap>) -> ParameterIndex {
+        self.get_or_insert(Parameter::Input(name))
+    }
+
+    /// Allocates a parameter for a symbol used as a JSON object key in SQL expressions.
+    pub(crate) fn symbol(&mut self, name: Symbol<'heap>) -> ParameterIndex {
+        self.get_or_insert(Parameter::Symbol(name))
+    }
+
+    /// Allocates a parameter for an integer constant.
+    pub(crate) fn int(&mut self, value: Int) -> ParameterIndex {
+        self.get_or_insert(Parameter::Int(value))
+    }
+
+    /// Allocates a parameter for a primitive constant value.
+    pub(crate) fn primitive(&mut self, primitive: Primitive<'heap>) -> ParameterIndex {
+        self.get_or_insert(Parameter::Primitive(primitive))
+    }
+
+    /// Allocates a parameter for a captured-environment field access.
+    pub(crate) fn env(&mut self, field: FieldIndex) -> ParameterIndex {
+        self.get_or_insert(Parameter::Env(field))
+    }
+
+    /// Allocates a parameter for the given temporal axis range.
+    pub(crate) fn temporal_axis(&mut self, axis: TemporalAxis) -> ParameterIndex {
+        self.get_or_insert(Parameter::TemporalAxis(axis))
+    }
+
+    /// Returns the number of distinct parameters allocated so far.
+    pub fn len(&self) -> usize {
+        self.reverse.len()
+    }
+
+    /// Returns `true` if no parameters have been allocated.
+    pub fn is_empty(&self) -> bool {
+        self.reverse.is_empty()
+    }
+}
+
+// FIX: the generic parameter `A` must be declared on the impl; `for Parameters<'_, A>`
+// with an undeclared `A` does not compile (E0412).
+impl<A: Allocator> fmt::Display for Parameters<'_, A> {
+    fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
+        // One `$index: description` line per parameter, in binding order.
+        for (index, param) in self.reverse.iter().enumerate() {
+            if index > 0 {
+                fmt.write_str("\n")?;
+            }
+            write!(fmt, "${index}: {param}")?;
+        }
+
+        Ok(())
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    #![expect(clippy::min_ident_chars)]
+    use alloc::alloc::Global;
+
+    use hashql_core::{
+        heap::Heap,
+        value::{Primitive, String},
+    };
+    use hashql_mir::{body::place::FieldIndex, interpret::value::Int};
+
+    use super::{Parameters, TemporalAxis};
+
+    #[test]
+    fn input_dedup() {
+        let heap = Heap::new();
+        let sym = heap.intern_symbol("x");
+
+        let mut params = Parameters::new_in(Global);
+        let a = params.input(sym);
+        let b = params.input(sym);
+
+        assert_eq!(a, b);
+        assert_eq!(params.len(), 1);
+    }
+
+    #[test]
+    fn category_isolation() {
+        let heap = Heap::new();
+        let sym = heap.intern_symbol("x");
+
+        let mut params = Parameters::new_in(Global);
+        let input_idx = params.input(sym);
+        let symbol_idx = params.symbol(sym);
+
+        assert_ne!(input_idx, symbol_idx);
+        assert_eq!(params.len(), 2);
+    }
+
+    #[test]
+    fn temporal_axis_stable() {
+        let mut params = Parameters::new_in(Global);
+        let a = params.temporal_axis(TemporalAxis::Transaction);
+        let b = params.temporal_axis(TemporalAxis::Transaction);
+
+        assert_eq!(a, b);
+        assert_eq!(params.len(), 1);
+    }
+
+    #[test]
+    fn int_dedup() {
+        let mut params = Parameters::new_in(Global);
+        let a = params.int(Int::from(42_i128));
+        let b = params.int(Int::from(42_i128));
+
+        assert_eq!(a, b);
+        assert_eq!(params.len(), 1);
+    }
+
+    #[test]
+    fn primitive_dedup() {
+        let heap = Heap::new();
+        let string = String::new(heap.intern_symbol("hello"));
+
+        let mut params = Parameters::new_in(Global);
+        let a = params.primitive(Primitive::String(string));
+        let b = params.primitive(Primitive::String(string));
+
+        assert_eq!(a, b);
+        assert_eq!(params.len(), 1);
+    }
+
+ #[test] + fn env_dedup() { + let mut params = Parameters::new_in(Global); + let a = params.env(FieldIndex::new(0)); + let b = params.env(FieldIndex::new(0)); + + assert_eq!(a, b); + assert_eq!(params.len(), 1); + } +} diff --git a/libs/@local/hashql/eval/src/postgres/projections.rs b/libs/@local/hashql/eval/src/postgres/projections.rs new file mode 100644 index 00000000000..aa694d81fc2 --- /dev/null +++ b/libs/@local/hashql/eval/src/postgres/projections.rs @@ -0,0 +1,406 @@ +//! Lazy join planner for entity-backed SQL queries. +//! +//! See [`Projections`] for the main entry point. + +use core::alloc::Allocator; + +use hash_graph_postgres_store::store::postgres::query::{ + self, Alias, Column, ColumnName, ColumnReference, ForeignKeyReference, FromItem, Identifier, + JoinType, SelectExpression, SelectStatement, Table, TableName, TableReference, table, +}; +use hashql_core::symbol::sym; + +use super::Parameters; + +/// Computed columns not directly backed by a single table column. +enum ComputedColumn { + /// Aggregated JSONB array of entity type IDs, produced by a `LEFT JOIN LATERAL` subquery. + EntityTypeIds, +} + +impl From for ColumnName<'_> { + fn from(value: ComputedColumn) -> Self { + match value { + ComputedColumn::EntityTypeIds => ColumnName::from(Identifier::from("entity_type_ids")), + } + } +} + +/// Lazy join planner for entity-backed SQL queries. +/// +/// Accessors like [`Self::entity_editions`] register that a table is needed and return a +/// reference to it. The actual `FROM` tree is built once at the end via [`Self::build_from`]. +pub(crate) struct Projections { + index: usize, + + /// Always present as the base table; everything joins through it. 
+    base_alias: Alias,
+
+    // FIX: restore stripped generic arguments — each lazily-joined table tracks its
+    // allocated `Alias` (see `*self.<field>.get_or_insert_with(|| Self::next_alias(..))`).
+    entity_editions: Option<Alias>,
+    entity_ids: Option<Alias>,
+    entity_type_ids: Option<Alias>,
+    left: Option<Alias>,
+    right: Option<Alias>,
+}
+
+impl Projections {
+    pub(crate) const fn new() -> Self {
+        let mut index = 0;
+        let base_alias = Self::next_alias(&mut index);
+
+        Self {
+            index,
+            base_alias,
+            entity_editions: None,
+            entity_ids: None,
+            entity_type_ids: None,
+            left: None,
+            right: None,
+        }
+    }
+
+    /// Allocates the next sequential table alias and advances the counter.
+    const fn next_alias(index: &mut usize) -> Alias {
+        let alias = Alias {
+            condition_index: 0,
+            chain_depth: 0,
+            number: *index,
+        };
+
+        *index += 1;
+
+        alias
+    }
+
+    /// Registers (if needed) and returns the `entity_editions` join.
+    pub(crate) fn entity_editions(&mut self) -> TableReference<'static> {
+        let alias = *self
+            .entity_editions
+            .get_or_insert_with(|| Self::next_alias(&mut self.index));
+
+        TableReference {
+            schema: None,
+            name: TableName::from(Table::EntityEditions),
+            alias: Some(alias),
+        }
+    }
+
+    /// Returns the base table reference, which is always present (no lazy join).
+    pub(crate) fn temporal_metadata(&self) -> TableReference<'static> {
+        TableReference {
+            schema: None,
+            name: TableName::from(Table::EntityTemporalMetadata),
+            alias: Some(self.base_alias),
+        }
+    }
+
+    /// Registers (if needed) and returns the `entity_ids` join.
+    pub(crate) fn entity_ids(&mut self) -> TableReference<'static> {
+        let alias = *self
+            .entity_ids
+            .get_or_insert_with(|| Self::next_alias(&mut self.index));
+
+        TableReference {
+            schema: None,
+            name: TableName::from(Table::EntityIds),
+            alias: Some(alias),
+        }
+    }
+
+    /// Unlike other accessors this returns a [`ColumnReference`]: entity type IDs are a computed
+    /// column produced by a `LEFT JOIN LATERAL` subquery, not a direct table column.
+ pub(crate) fn entity_type_ids(&mut self) -> ColumnReference<'static> { + let alias = *self + .entity_type_ids + .get_or_insert_with(|| Self::next_alias(&mut self.index)); + + ColumnReference { + correlation: Some(TableReference { + schema: None, + name: TableName::from(Table::EntityIsOfTypeIds), + alias: Some(alias), + }), + name: ComputedColumn::EntityTypeIds.into(), + } + } + + pub(crate) fn left_entity(&mut self) -> TableReference<'static> { + let alias = *self + .left + .get_or_insert_with(|| Self::next_alias(&mut self.index)); + + TableReference { + schema: None, + name: TableName::from(Table::EntityHasLeftEntity), + alias: Some(alias), + } + } + + pub(crate) fn right_entity(&mut self) -> TableReference<'static> { + let alias = *self + .right + .get_or_insert_with(|| Self::next_alias(&mut self.index)); + + TableReference { + schema: None, + name: TableName::from(Table::EntityHasRightEntity), + alias: Some(alias), + } + } + + /// Builds the FROM clause with all joins that were requested during compilation. + /// + /// `entity_temporal_metadata` is always the base table. Other tables are joined + /// conditionally based on which paths the filter body and provides set touched. + /// CROSS JOIN LATERALs for continuation subqueries are appended last. 
+ pub(crate) fn build_from( + &self, + parameters: &mut Parameters<'_, impl Allocator>, + laterals: Vec, impl Allocator>, + ) -> FromItem<'static> { + let base = FromItem::table(Table::EntityTemporalMetadata) + .alias(TableReference { + schema: None, + name: TableName::from(Table::EntityTemporalMetadata), + alias: Some(self.base_alias), + }) + .build(); + + let mut from = base; + + // entity_editions ON edition_id (INNER) + if let Some(alias) = self.entity_editions { + from = self.build_entity_editions(from, alias); + } + + // entity_ids ON (web_id, entity_uuid) (INNER) + if let Some(alias) = self.entity_ids { + from = self.build_entity_ids(from, alias); + } + + // entity_type_ids: self-contained LATERAL that joins entity_is_of_type_ids + // internally, unnests the parallel arrays, and aggregates into a JSONB array. + // + // LEFT JOIN LATERAL ( + // SELECT jsonb_agg(jsonb_build_object($base_url, u."b", $version, u."v")) + // AS "entity_type_ids" + // FROM "entity_is_of_type_ids" AS "eit" + // CROSS JOIN LATERAL UNNEST("eit"."base_urls", "eit"."versions") AS "u"("b", "v") + // WHERE "eit"."entity_edition_id" = "base"."entity_edition_id" + // ) AS ON TRUE + if let Some(alias) = self.entity_type_ids { + from = self.build_entity_type_ids(parameters, from, alias); + } + + // entity_has_left_entity ON (web_id, entity_uuid) (LEFT OUTER) + if let Some(alias) = self.left { + from = self.build_entity_has_left_entity(from, alias); + } + + // entity_has_right_entity ON (web_id, entity_uuid) (LEFT OUTER) + if let Some(alias) = self.right { + from = self.build_entity_has_right_entity(from, alias); + } + + // CROSS JOIN LATERALs for continuation subqueries (must come after + // all regular joins since they may reference any of the joined tables) + for lateral in laterals { + from = from.cross_join(lateral); + } + + from + } + + fn build_entity_editions<'item>(&self, from: FromItem<'item>, alias: Alias) -> FromItem<'item> { + let fk = ForeignKeyReference::Single { + on: 
Column::EntityTemporalMetadata(table::EntityTemporalMetadata::EditionId), + join: Column::EntityEditions(table::EntityEditions::EditionId), + join_type: JoinType::Inner, + }; + + from.join( + JoinType::Inner, + FromItem::table(Table::EntityEditions).alias(Table::EntityEditions.aliased(alias)), + ) + .on(fk.conditions(self.base_alias, alias)) + .build() + } + + fn build_entity_ids<'item>(&self, from: FromItem<'item>, alias: Alias) -> FromItem<'item> { + let fk = ForeignKeyReference::Double { + on: [ + Column::EntityTemporalMetadata(table::EntityTemporalMetadata::WebId), + Column::EntityTemporalMetadata(table::EntityTemporalMetadata::EntityUuid), + ], + join: [ + Column::EntityIds(table::EntityIds::WebId), + Column::EntityIds(table::EntityIds::EntityUuid), + ], + join_type: JoinType::Inner, + }; + + from.join( + JoinType::Inner, + FromItem::table(Table::EntityIds).alias(Table::EntityIds.aliased(alias)), + ) + .on(fk.conditions(self.base_alias, alias)) + .build() + } + + fn build_entity_type_ids<'item>( + &self, + parameters: &mut Parameters<'_, impl Allocator>, + from: FromItem<'item>, + alias: Alias, + ) -> FromItem<'item> { + let eit_ref = TableReference { + schema: None, + name: TableName::from(Identifier::from("eit")), + alias: None, + }; + + let inner_from = FromItem::table(Table::EntityIsOfTypeIds) + .alias(TableReference { + schema: None, + name: TableName::from(Identifier::from("eit")), + alias: None, + }) + .build() + .cross_join(FromItem::Function { + lateral: false, + function: query::Function::Unnest(vec![ + query::Expression::ColumnReference(ColumnReference { + correlation: Some(eit_ref.clone()), + name: Column::EntityIsOfTypeIds(table::EntityIsOfTypeIds::BaseUrls).into(), + }), + query::Expression::ColumnReference(ColumnReference { + correlation: Some(eit_ref), + name: Column::EntityIsOfTypeIds(table::EntityIsOfTypeIds::Versions).into(), + }), + ]), + with_ordinality: false, + alias: Some(TableReference { + schema: None, + name: 
TableName::from(Identifier::from("u")), + alias: None, + }), + column_alias: vec![ + ColumnName::from(Identifier::from("b")), + ColumnName::from(Identifier::from("v")), + ], + }); + + // WHERE "eit"."entity_edition_id" = "base"."entity_edition_id" + let correlation = query::Expression::equal( + query::Expression::ColumnReference(ColumnReference { + correlation: Some(TableReference { + schema: None, + name: TableName::from(Identifier::from("eit")), + alias: None, + }), + name: Column::EntityIsOfType(table::EntityIsOfType::EntityEditionId, None).into(), + }), + query::Expression::ColumnReference(ColumnReference { + correlation: Some(self.temporal_metadata()), + name: Column::EntityTemporalMetadata(table::EntityTemporalMetadata::EditionId) + .into(), + }), + ); + + let subquery = SelectStatement::builder() + .selects(vec![SelectExpression::Expression { + expression: query::Expression::Function(query::Function::JsonAgg(Box::new( + query::Expression::Function(query::Function::JsonBuildObject(vec![ + ( + parameters.symbol(sym::base_url).into(), + query::Expression::ColumnReference(ColumnReference { + correlation: None, + name: ColumnName::from(Identifier::from("b")), + }), + ), + ( + parameters.symbol(sym::version).into(), + query::Expression::ColumnReference(ColumnReference { + correlation: None, + name: ColumnName::from(Identifier::from("v")), + }), + ), + ])), + ))), + alias: Some(Identifier::from("entity_type_ids")), + }]) + .from(inner_from) + .where_expression({ + let mut w = query::WhereExpression::default(); + w.add_condition(correlation); + w + }) + .build(); + + let lateral = query::FromItem::Subquery { + lateral: true, + statement: Box::new(subquery), + alias: Some(TableReference { + schema: None, + name: TableName::from(Table::EntityIsOfTypeIds), + alias: Some(alias), + }), + column_alias: vec![], + }; + + from.join(JoinType::LeftOuter, lateral) + .on(vec![query::Expression::Constant(query::Constant::Boolean( + true, + ))]) + .build() + } + + fn 
build_entity_has_right_entity<'item>( + &self, + from: FromItem<'item>, + alias: Alias, + ) -> FromItem<'item> { + let fk = ForeignKeyReference::Double { + on: [ + Column::EntityTemporalMetadata(table::EntityTemporalMetadata::WebId), + Column::EntityTemporalMetadata(table::EntityTemporalMetadata::EntityUuid), + ], + join: [ + Column::EntityHasRightEntity(table::EntityHasRightEntity::WebId), + Column::EntityHasRightEntity(table::EntityHasRightEntity::EntityUuid), + ], + join_type: JoinType::LeftOuter, + }; + + from.join( + JoinType::LeftOuter, + FromItem::table(Table::EntityHasRightEntity) + .alias(Table::EntityHasRightEntity.aliased(alias)), + ) + .on(fk.conditions(self.base_alias, alias)) + .build() + } + + fn build_entity_has_left_entity<'item>( + &self, + from: FromItem<'item>, + alias: Alias, + ) -> FromItem<'item> { + let fk = ForeignKeyReference::Double { + on: [ + Column::EntityTemporalMetadata(table::EntityTemporalMetadata::WebId), + Column::EntityTemporalMetadata(table::EntityTemporalMetadata::EntityUuid), + ], + join: [ + Column::EntityHasLeftEntity(table::EntityHasLeftEntity::WebId), + Column::EntityHasLeftEntity(table::EntityHasLeftEntity::EntityUuid), + ], + join_type: JoinType::LeftOuter, + }; + + from.join( + JoinType::LeftOuter, + FromItem::table(Table::EntityHasLeftEntity) + .alias(Table::EntityHasLeftEntity.aliased(alias)), + ) + .on(fk.conditions(self.base_alias, alias)) + .build() + } +} diff --git a/libs/@local/hashql/eval/src/postgres/traverse.rs b/libs/@local/hashql/eval/src/postgres/traverse.rs new file mode 100644 index 00000000000..0413897bf8e --- /dev/null +++ b/libs/@local/hashql/eval/src/postgres/traverse.rs @@ -0,0 +1,150 @@ +//! Mapping from logical entity fields to physical PostgreSQL columns. +//! +//! This module contains [`eval_entity_path`], the single translation table between +//! [`EntityPath`] values (used by MIR traversal analysis) and the physical Postgres schema +//! 
(spanning `entity_temporal_metadata`, `entity_editions`, `entity_ids`, and edge tables). + +use core::alloc::Allocator; + +use hash_graph_postgres_store::store::postgres::query::{ + self, Column, ColumnReference, Expression, table, +}; +use hashql_core::symbol::sym; +use hashql_mir::pass::execution::traversal::EntityPath; + +use super::DatabaseContext; + +/// Lowers an [`EntityPath`] to a SQL [`Expression`], requesting joins and allocating parameters +/// as needed. +/// +/// Composite paths (e.g. [`EntityPath::RecordId`]) are assembled recursively from their +/// constituent columns. +#[expect(clippy::too_many_lines, reason = "match statement")] +pub(crate) fn eval_entity_path( + db: &mut DatabaseContext<'_, A>, + path: EntityPath, +) -> Expression { + match path { + EntityPath::Properties => Expression::ColumnReference(ColumnReference { + correlation: Some(db.projections.entity_editions()), + name: Column::EntityEditions(table::EntityEditions::Properties).into(), + }), + EntityPath::Vectors => unreachable!("embeddings are not supported in postgres"), + EntityPath::RecordId => Expression::Function(query::Function::JsonBuildObject(vec![ + ( + db.parameters.symbol(sym::entity_id).into(), + eval_entity_path(db, EntityPath::EntityId), + ), + ( + db.parameters.symbol(sym::draft_id).into(), + eval_entity_path(db, EntityPath::DraftId), + ), + ])), + EntityPath::EntityId => Expression::Function(query::Function::JsonBuildObject(vec![ + ( + db.parameters.symbol(sym::web_id).into(), + eval_entity_path(db, EntityPath::WebId), + ), + ( + db.parameters.symbol(sym::entity_uuid).into(), + eval_entity_path(db, EntityPath::EntityUuid), + ), + ( + db.parameters.symbol(sym::draft_id).into(), + eval_entity_path(db, EntityPath::DraftId), + ), + ])), + EntityPath::WebId => Expression::ColumnReference(ColumnReference { + correlation: Some(db.projections.temporal_metadata()), + name: Column::EntityTemporalMetadata(table::EntityTemporalMetadata::WebId).into(), + }), + 
EntityPath::EntityUuid => Expression::ColumnReference(ColumnReference { + correlation: Some(db.projections.temporal_metadata()), + name: Column::EntityTemporalMetadata(table::EntityTemporalMetadata::EntityUuid).into(), + }), + EntityPath::DraftId => Expression::ColumnReference(ColumnReference { + correlation: Some(db.projections.temporal_metadata()), + name: Column::EntityTemporalMetadata(table::EntityTemporalMetadata::DraftId).into(), + }), + EntityPath::EditionId => Expression::ColumnReference(ColumnReference { + correlation: Some(db.projections.temporal_metadata()), + name: Column::EntityTemporalMetadata(table::EntityTemporalMetadata::EditionId).into(), + }), + EntityPath::TemporalVersioning => { + Expression::Function(query::Function::JsonBuildObject(vec![ + ( + db.parameters.symbol(sym::decision_time).into(), + eval_entity_path(db, EntityPath::DecisionTime), + ), + ( + db.parameters.symbol(sym::transaction_time).into(), + eval_entity_path(db, EntityPath::TransactionTime), + ), + ])) + } + EntityPath::DecisionTime => Expression::ColumnReference(ColumnReference { + correlation: Some(db.projections.temporal_metadata()), + name: Column::EntityTemporalMetadata(table::EntityTemporalMetadata::DecisionTime) + .into(), + }), + EntityPath::TransactionTime => Expression::ColumnReference(ColumnReference { + correlation: Some(db.projections.temporal_metadata()), + name: Column::EntityTemporalMetadata(table::EntityTemporalMetadata::TransactionTime) + .into(), + }), + EntityPath::EntityTypeIds => Expression::ColumnReference(db.projections.entity_type_ids()), + EntityPath::Archived => Expression::ColumnReference(ColumnReference { + correlation: Some(db.projections.entity_editions()), + name: Column::EntityEditions(table::EntityEditions::Archived).into(), + }), + EntityPath::Confidence => Expression::ColumnReference(ColumnReference { + correlation: Some(db.projections.entity_editions()), + name: Column::EntityEditions(table::EntityEditions::Confidence).into(), + }), + 
EntityPath::ProvenanceInferred => Expression::ColumnReference(ColumnReference { + correlation: Some(db.projections.entity_ids()), + name: Column::EntityIds(table::EntityIds::Provenance).into(), + }), + EntityPath::ProvenanceEdition => Expression::ColumnReference(ColumnReference { + correlation: Some(db.projections.entity_editions()), + name: Column::EntityEditions(table::EntityEditions::Provenance).into(), + }), + EntityPath::PropertyMetadata => Expression::ColumnReference(ColumnReference { + correlation: Some(db.projections.entity_editions()), + name: Column::EntityEditions(table::EntityEditions::PropertyMetadata).into(), + }), + EntityPath::LeftEntityWebId => Expression::ColumnReference(ColumnReference { + correlation: Some(db.projections.left_entity()), + name: Column::EntityHasLeftEntity(table::EntityHasLeftEntity::LeftEntityWebId).into(), + }), + EntityPath::LeftEntityUuid => Expression::ColumnReference(ColumnReference { + correlation: Some(db.projections.left_entity()), + name: Column::EntityHasLeftEntity(table::EntityHasLeftEntity::LeftEntityUuid).into(), + }), + EntityPath::RightEntityWebId => Expression::ColumnReference(ColumnReference { + correlation: Some(db.projections.right_entity()), + name: Column::EntityHasRightEntity(table::EntityHasRightEntity::RightEntityWebId) + .into(), + }), + EntityPath::RightEntityUuid => Expression::ColumnReference(ColumnReference { + correlation: Some(db.projections.right_entity()), + name: Column::EntityHasRightEntity(table::EntityHasRightEntity::RightEntityUuid).into(), + }), + EntityPath::LeftEntityConfidence => Expression::ColumnReference(ColumnReference { + correlation: Some(db.projections.left_entity()), + name: Column::EntityHasLeftEntity(table::EntityHasLeftEntity::Confidence).into(), + }), + EntityPath::RightEntityConfidence => Expression::ColumnReference(ColumnReference { + correlation: Some(db.projections.right_entity()), + name: Column::EntityHasRightEntity(table::EntityHasRightEntity::Confidence).into(), + }), 
+ EntityPath::LeftEntityProvenance => Expression::ColumnReference(ColumnReference { + correlation: Some(db.projections.left_entity()), + name: Column::EntityHasLeftEntity(table::EntityHasLeftEntity::Provenance).into(), + }), + EntityPath::RightEntityProvenance => Expression::ColumnReference(ColumnReference { + correlation: Some(db.projections.right_entity()), + name: Column::EntityHasRightEntity(table::EntityHasRightEntity::Provenance).into(), + }), + } +} diff --git a/libs/@local/hashql/eval/tests/ui/postgres/.spec.toml b/libs/@local/hashql/eval/tests/ui/postgres/.spec.toml new file mode 100644 index 00000000000..6835ac74157 --- /dev/null +++ b/libs/@local/hashql/eval/tests/ui/postgres/.spec.toml @@ -0,0 +1 @@ +suite = "eval/postgres" diff --git a/libs/@local/hashql/eval/tests/ui/postgres/arithmetic-addition-casts.jsonc b/libs/@local/hashql/eval/tests/ui/postgres/arithmetic-addition-casts.jsonc new file mode 100644 index 00000000000..5ee3aeccee6 --- /dev/null +++ b/libs/@local/hashql/eval/tests/ui/postgres/arithmetic-addition-casts.jsonc @@ -0,0 +1,17 @@ +//@ run: skip reason="::core::math::add intrinsic not supported in specialization yet (H-4728)" +//@ description: Arithmetic addition casts both operands to ::numeric, outer comparison does not +// biome-ignore format: readability +["::graph::tail::collect", + ["::graph::body::filter", + ["::graph::head::entities", ["input", "time_axis", "_"]], + ["fn", { "#tuple": [] }, { "#struct": { "vertex": "_" } }, "_", + [">", + ["+", + ["input", "x", "Integer"], + ["input", "y", "Integer"] + ], + { "#literal": 0 } + ] + ] + ] +] diff --git a/libs/@local/hashql/eval/tests/ui/postgres/comparison-no-cast.aux.mir b/libs/@local/hashql/eval/tests/ui/postgres/comparison-no-cast.aux.mir new file mode 100644 index 00000000000..98d2753e013 --- /dev/null +++ b/libs/@local/hashql/eval/tests/ui/postgres/comparison-no-cast.aux.mir @@ -0,0 +1,76 @@ +thunk {thunk#1}() -> ::graph::TimeAxis { + let %0: ::graph::TimeAxis + + bb0(): { + %0 = 
input LOAD time_axis + + return %0 + } +} + +thunk {thunk#2}() -> Integer { + let %0: Integer + + bb0(): { + %0 = input LOAD x + + return %0 + } +} + +thunk {thunk#3}() -> Integer { + let %0: Integer + + bb0(): { + %0 = input LOAD y + + return %0 + } +} + +thunk {thunk#4}() -> Boolean { + let %0: Boolean + let %1: Integer + let %2: Integer + + bb0(): { + %2 = input LOAD x + %1 = input LOAD y + %0 = %2 > %1 + + return %0 + } +} + +fn {graph::read::filter@7}(%0: (), %1: ::graph::types::knowledge::entity::Entity) -> Boolean { + let %2: Boolean + let %3: Integer + let %4: Integer + + bb0(): { // postgres + %4 = input LOAD x + %3 = input LOAD y + %2 = %4 > %3 + + return %2 + } +} + +thunk {thunk#5}() -> List<::graph::types::knowledge::entity::Entity> { + let %0: List<::graph::types::knowledge::entity::Entity> + let %1: () + let %2: ::graph::TimeAxis + + bb0(): { + %2 = input LOAD time_axis + %1 = () + + graph read entities(%2) + |> filter({graph::read::filter@7}, %1) + |> collect -> bb1(_) + } + + bb1(%0): { + return %0 + } +} \ No newline at end of file diff --git a/libs/@local/hashql/eval/tests/ui/postgres/comparison-no-cast.jsonc b/libs/@local/hashql/eval/tests/ui/postgres/comparison-no-cast.jsonc new file mode 100644 index 00000000000..4c26682dda2 --- /dev/null +++ b/libs/@local/hashql/eval/tests/ui/postgres/comparison-no-cast.jsonc @@ -0,0 +1,14 @@ +//@ run: pass +//@ description: Comparison operators produce no type cast on operands +// biome-ignore format: readability +["::graph::tail::collect", + ["::graph::body::filter", + ["::graph::head::entities", ["input", "time_axis", "_"]], + ["fn", { "#tuple": [] }, { "#struct": { "vertex": "_" } }, "_", + [">", + ["input", "x", "Integer"], + ["input", "y", "Integer"] + ] + ] + ] +] diff --git a/libs/@local/hashql/eval/tests/ui/postgres/comparison-no-cast.stdout b/libs/@local/hashql/eval/tests/ui/postgres/comparison-no-cast.stdout new file mode 100644 index 00000000000..6550385338f --- /dev/null +++ 
b/libs/@local/hashql/eval/tests/ui/postgres/comparison-no-cast.stdout @@ -0,0 +1,14 @@ +════ SQL ═══════════════════════════════════════════════════════════════════════ + +SELECT ("continuation_4_0"."row")."block" AS "continuation_4_0_block", ("continuation_4_0"."row")."locals" AS "continuation_4_0_locals", ("continuation_4_0"."row")."values" AS "continuation_4_0_values" +FROM "entity_temporal_metadata" AS "entity_temporal_metadata_0_0_0" +CROSS JOIN LATERAL (SELECT (ROW($2 > $3, NULL, NULL, NULL)::continuation) AS "row" +OFFSET 0) AS "continuation_4_0" +WHERE "entity_temporal_metadata_0_0_0"."transaction_time" && $0 AND "entity_temporal_metadata_0_0_0"."decision_time" && $1 AND ("continuation_4_0"."row")."filter" IS NOT FALSE + +════ Parameters ════════════════════════════════════════════════════════════════ + +$0: TemporalAxis(Transaction) +$1: TemporalAxis(Decision) +$2: Input(x) +$3: Input(y) diff --git a/libs/@local/hashql/eval/tests/ui/postgres/constant-true-filter.aux.mir b/libs/@local/hashql/eval/tests/ui/postgres/constant-true-filter.aux.mir new file mode 100644 index 00000000000..76a15d5fcc5 --- /dev/null +++ b/libs/@local/hashql/eval/tests/ui/postgres/constant-true-filter.aux.mir @@ -0,0 +1,34 @@ +thunk {thunk#1}() -> ::graph::TimeAxis { + let %0: ::graph::TimeAxis + + bb0(): { + %0 = input LOAD time_axis + + return %0 + } +} + +fn {graph::read::filter@7}(%0: (), %1: ::graph::types::knowledge::entity::Entity) -> Boolean { + bb0(): { // interpreter + return 1 + } +} + +thunk {thunk#2}() -> List<::graph::types::knowledge::entity::Entity> { + let %0: List<::graph::types::knowledge::entity::Entity> + let %1: () + let %2: ::graph::TimeAxis + + bb0(): { + %2 = input LOAD time_axis + %1 = () + + graph read entities(%2) + |> filter({graph::read::filter@7}, %1) + |> collect -> bb1(_) + } + + bb1(%0): { + return %0 + } +} \ No newline at end of file diff --git a/libs/@local/hashql/eval/tests/ui/postgres/constant-true-filter.jsonc 
b/libs/@local/hashql/eval/tests/ui/postgres/constant-true-filter.jsonc new file mode 100644 index 00000000000..84ff9d1b597 --- /dev/null +++ b/libs/@local/hashql/eval/tests/ui/postgres/constant-true-filter.jsonc @@ -0,0 +1,11 @@ +//@ run: pass +//@ description: Simplest filter — constant true, no branching +// biome-ignore format: readability +["::graph::tail::collect", + ["::graph::body::filter", + ["::graph::head::entities", ["input", "time_axis", "_"]], + ["fn", { "#tuple": [] }, { "#struct": { "vertex": "_" } }, "_", + {"#literal": true} + ] + ] +] diff --git a/libs/@local/hashql/eval/tests/ui/postgres/constant-true-filter.stdout b/libs/@local/hashql/eval/tests/ui/postgres/constant-true-filter.stdout new file mode 100644 index 00000000000..e6a63637097 --- /dev/null +++ b/libs/@local/hashql/eval/tests/ui/postgres/constant-true-filter.stdout @@ -0,0 +1,10 @@ +════ SQL ═══════════════════════════════════════════════════════════════════════ + +SELECT +FROM "entity_temporal_metadata" AS "entity_temporal_metadata_0_0_0" +WHERE "entity_temporal_metadata_0_0_0"."transaction_time" && $0 AND "entity_temporal_metadata_0_0_0"."decision_time" && $1 + +════ Parameters ════════════════════════════════════════════════════════════════ + +$0: TemporalAxis(Transaction) +$1: TemporalAxis(Decision) diff --git a/libs/@local/hashql/eval/tests/ui/postgres/dict-construction.aux.mir b/libs/@local/hashql/eval/tests/ui/postgres/dict-construction.aux.mir new file mode 100644 index 00000000000..7c78cd33883 --- /dev/null +++ b/libs/@local/hashql/eval/tests/ui/postgres/dict-construction.aux.mir @@ -0,0 +1,80 @@ +thunk {thunk#1}() -> ::graph::TimeAxis { + let %0: ::graph::TimeAxis + + bb0(): { + %0 = input LOAD time_axis + + return %0 + } +} + +thunk {thunk#2}() -> ::graph::types::knowledge::entity::EntityUuid { + let %0: ::graph::types::knowledge::entity::EntityUuid + + bb0(): { + %0 = input LOAD k + + return %0 + } +} + +thunk {thunk#3}() -> ::graph::types::principal::actor_group::web::WebId 
{ + let %0: ::graph::types::principal::actor_group::web::WebId + + bb0(): { + %0 = input LOAD v + + return %0 + } +} + +thunk {thunk#5}() -> Dict<::graph::types::knowledge::entity::EntityUuid, ::graph::types::principal::actor_group::web::WebId> { + let %0: Dict<::graph::types::knowledge::entity::EntityUuid, ::graph::types::principal::actor_group::web::WebId> + let %1: ::graph::types::principal::actor_group::web::WebId + let %2: ::graph::types::knowledge::entity::EntityUuid + + bb0(): { + %2 = input LOAD k + %1 = input LOAD v + %0 = dict(%2: %1) + + return %0 + } +} + +fn {graph::read::filter@7}(%0: (), %1: ::graph::types::knowledge::entity::Entity) -> Boolean { + let %2: Dict<::graph::types::knowledge::entity::EntityUuid, ::graph::types::principal::actor_group::web::WebId> + let %3: Boolean + let %4: Dict<::graph::types::knowledge::entity::EntityUuid, ::graph::types::principal::actor_group::web::WebId> + let %5: ::graph::types::principal::actor_group::web::WebId + let %6: ::graph::types::knowledge::entity::EntityUuid + + bb0(): { // postgres + %2 = dict(%1.metadata.record_id.entity_id.entity_uuid: %1.metadata.record_id.entity_id.web_id) + %6 = input LOAD k + %5 = input LOAD v + %4 = dict(%6: %5) + %3 = %2 == %4 + + return %3 + } +} + +thunk {thunk#7}() -> List<::graph::types::knowledge::entity::Entity> { + let %0: List<::graph::types::knowledge::entity::Entity> + let %1: () + let %2: ::graph::TimeAxis + + bb0(): { + %2 = input LOAD time_axis + %1 = () + + graph read entities(%2) + |> filter({graph::read::filter@7}, %1) + |> collect -> bb1(_) + } + + bb1(%0): { + return %0 + } +} \ No newline at end of file diff --git a/libs/@local/hashql/eval/tests/ui/postgres/dict-construction.jsonc b/libs/@local/hashql/eval/tests/ui/postgres/dict-construction.jsonc new file mode 100644 index 00000000000..5af9acb4494 --- /dev/null +++ b/libs/@local/hashql/eval/tests/ui/postgres/dict-construction.jsonc @@ -0,0 +1,20 @@ +//@ run: pass +//@ description: Dict aggregate from entity 
fields - AggregateKind::Dict → jsonb_build_object with key-value pairs +// biome-ignore format: readability +["::graph::tail::collect", + ["::graph::body::filter", + ["::graph::head::entities", ["input", "time_axis", "_"]], + ["fn", { "#tuple": [] }, { "#struct": { "vertex": "_" } }, "_", + ["==", + { "#dict": [ + ["vertex.metadata.record_id.entity_id.entity_uuid", + "vertex.metadata.record_id.entity_id.web_id"] + ]}, + { "#dict": [ + [["input", "k", "::graph::types::knowledge::entity::EntityUuid"], + ["input", "v", "::graph::types::principal::actor_group::web::WebId"]] + ]} + ] + ] + ] +] diff --git a/libs/@local/hashql/eval/tests/ui/postgres/dict-construction.stdout b/libs/@local/hashql/eval/tests/ui/postgres/dict-construction.stdout new file mode 100644 index 00000000000..e491f375159 --- /dev/null +++ b/libs/@local/hashql/eval/tests/ui/postgres/dict-construction.stdout @@ -0,0 +1,14 @@ +════ SQL ═══════════════════════════════════════════════════════════════════════ + +SELECT ("continuation_4_0"."row")."block" AS "continuation_4_0_block", ("continuation_4_0"."row")."locals" AS "continuation_4_0_locals", ("continuation_4_0"."row")."values" AS "continuation_4_0_values" +FROM "entity_temporal_metadata" AS "entity_temporal_metadata_0_0_0" +CROSS JOIN LATERAL (SELECT (ROW(jsonb_build_object("entity_temporal_metadata_0_0_0"."entity_uuid", "entity_temporal_metadata_0_0_0"."web_id") = jsonb_build_object($2, $3), NULL, NULL, NULL)::continuation) AS "row" +OFFSET 0) AS "continuation_4_0" +WHERE "entity_temporal_metadata_0_0_0"."transaction_time" && $0 AND "entity_temporal_metadata_0_0_0"."decision_time" && $1 AND ("continuation_4_0"."row")."filter" IS NOT FALSE + +════ Parameters ════════════════════════════════════════════════════════════════ + +$0: TemporalAxis(Transaction) +$1: TemporalAxis(Decision) +$2: Input(k) +$3: Input(v) diff --git a/libs/@local/hashql/eval/tests/ui/postgres/entity-archived-check.aux.mir 
b/libs/@local/hashql/eval/tests/ui/postgres/entity-archived-check.aux.mir new file mode 100644 index 00000000000..e5c9cdc1591 --- /dev/null +++ b/libs/@local/hashql/eval/tests/ui/postgres/entity-archived-check.aux.mir @@ -0,0 +1,38 @@ +thunk {thunk#1}() -> ::graph::TimeAxis { + let %0: ::graph::TimeAxis + + bb0(): { + %0 = input LOAD time_axis + + return %0 + } +} + +fn {graph::read::filter@7}(%0: (), %1: ::graph::types::knowledge::entity::Entity) -> Boolean { + let %2: Boolean + + bb0(): { // postgres + %2 = !%1.metadata.archived + + return %2 + } +} + +thunk {thunk#3}() -> List<::graph::types::knowledge::entity::Entity> { + let %0: List<::graph::types::knowledge::entity::Entity> + let %1: () + let %2: ::graph::TimeAxis + + bb0(): { + %2 = input LOAD time_axis + %1 = () + + graph read entities(%2) + |> filter({graph::read::filter@7}, %1) + |> collect -> bb1(_) + } + + bb1(%0): { + return %0 + } +} \ No newline at end of file diff --git a/libs/@local/hashql/eval/tests/ui/postgres/entity-archived-check.jsonc b/libs/@local/hashql/eval/tests/ui/postgres/entity-archived-check.jsonc new file mode 100644 index 00000000000..616a701a064 --- /dev/null +++ b/libs/@local/hashql/eval/tests/ui/postgres/entity-archived-check.jsonc @@ -0,0 +1,14 @@ +//@ run: pass +//@ description: Entity archived filter - entity_editions join triggered by metadata.archived access +// biome-ignore format: readability +["::graph::tail::collect", + ["::graph::body::filter", + ["::graph::head::entities", ["input", "time_axis", "_"]], + ["fn", { "#tuple": [] }, { "#struct": { "vertex": "_" } }, "_", + ["==", + "vertex.metadata.archived", + { "#literal": false } + ] + ] + ] +] diff --git a/libs/@local/hashql/eval/tests/ui/postgres/entity-archived-check.stdout b/libs/@local/hashql/eval/tests/ui/postgres/entity-archived-check.stdout new file mode 100644 index 00000000000..957721b893b --- /dev/null +++ b/libs/@local/hashql/eval/tests/ui/postgres/entity-archived-check.stdout @@ -0,0 +1,14 @@ +════ SQL 
═══════════════════════════════════════════════════════════════════════ + +SELECT ("continuation_1_0"."row")."block" AS "continuation_1_0_block", ("continuation_1_0"."row")."locals" AS "continuation_1_0_locals", ("continuation_1_0"."row")."values" AS "continuation_1_0_values" +FROM "entity_temporal_metadata" AS "entity_temporal_metadata_0_0_0" +INNER JOIN "entity_editions" AS "entity_editions_0_0_1" + ON "entity_editions_0_0_1"."entity_edition_id" = "entity_temporal_metadata_0_0_0"."entity_edition_id" +CROSS JOIN LATERAL (SELECT (ROW(NOT("entity_editions_0_0_1"."archived"), NULL, NULL, NULL)::continuation) AS "row" +OFFSET 0) AS "continuation_1_0" +WHERE "entity_temporal_metadata_0_0_0"."transaction_time" && $0 AND "entity_temporal_metadata_0_0_0"."decision_time" && $1 AND ("continuation_1_0"."row")."filter" IS NOT FALSE + +════ Parameters ════════════════════════════════════════════════════════════════ + +$0: TemporalAxis(Transaction) +$1: TemporalAxis(Decision) diff --git a/libs/@local/hashql/eval/tests/ui/postgres/entity-draft-id-equality.aux.mir b/libs/@local/hashql/eval/tests/ui/postgres/entity-draft-id-equality.aux.mir new file mode 100644 index 00000000000..12741a18694 --- /dev/null +++ b/libs/@local/hashql/eval/tests/ui/postgres/entity-draft-id-equality.aux.mir @@ -0,0 +1,50 @@ +thunk {thunk#1}() -> ::graph::TimeAxis { + let %0: ::graph::TimeAxis + + bb0(): { + %0 = input LOAD time_axis + + return %0 + } +} + +thunk {thunk#2}() -> ::graph::types::knowledge::entity::DraftId { + let %0: ::graph::types::knowledge::entity::DraftId + + bb0(): { + %0 = input LOAD draft + + return %0 + } +} + +fn {graph::read::filter@7}(%0: (), %1: ::graph::types::knowledge::entity::Entity) -> Boolean { + let %2: Boolean + let %3: ::graph::types::knowledge::entity::DraftId + + bb0(): { // interpreter + %3 = input LOAD draft + %2 = %1.metadata.record_id.entity_id.draft_id == %3 + + return %2 + } +} + +thunk {thunk#4}() -> List<::graph::types::knowledge::entity::Entity> { + let %0: 
List<::graph::types::knowledge::entity::Entity> + let %1: () + let %2: ::graph::TimeAxis + + bb0(): { + %2 = input LOAD time_axis + %1 = () + + graph read entities(%2) + |> filter({graph::read::filter@7}, %1) + |> collect -> bb1(_) + } + + bb1(%0): { + return %0 + } +} \ No newline at end of file diff --git a/libs/@local/hashql/eval/tests/ui/postgres/entity-draft-id-equality.jsonc b/libs/@local/hashql/eval/tests/ui/postgres/entity-draft-id-equality.jsonc new file mode 100644 index 00000000000..3cfc3bbe829 --- /dev/null +++ b/libs/@local/hashql/eval/tests/ui/postgres/entity-draft-id-equality.jsonc @@ -0,0 +1,14 @@ +//@ run: pass +//@ description: Entity DraftId equality - entity path resolution for draft_id column +// biome-ignore format: readability +["::graph::tail::collect", + ["::graph::body::filter", + ["::graph::head::entities", ["input", "time_axis", "_"]], + ["fn", { "#tuple": [] }, { "#struct": { "vertex": "_" } }, "_", + ["==", + "vertex.metadata.record_id.entity_id.draft_id", + ["input", "draft", "::graph::types::knowledge::entity::DraftId"] + ] + ] + ] +] diff --git a/libs/@local/hashql/eval/tests/ui/postgres/entity-draft-id-equality.stdout b/libs/@local/hashql/eval/tests/ui/postgres/entity-draft-id-equality.stdout new file mode 100644 index 00000000000..06893ece76f --- /dev/null +++ b/libs/@local/hashql/eval/tests/ui/postgres/entity-draft-id-equality.stdout @@ -0,0 +1,10 @@ +════ SQL ═══════════════════════════════════════════════════════════════════════ + +SELECT "entity_temporal_metadata_0_0_0"."draft_id" AS "draft_id" +FROM "entity_temporal_metadata" AS "entity_temporal_metadata_0_0_0" +WHERE "entity_temporal_metadata_0_0_0"."transaction_time" && $0 AND "entity_temporal_metadata_0_0_0"."decision_time" && $1 + +════ Parameters ════════════════════════════════════════════════════════════════ + +$0: TemporalAxis(Transaction) +$1: TemporalAxis(Decision) diff --git a/libs/@local/hashql/eval/tests/ui/postgres/entity-type-ids-lateral.aux.mir 
b/libs/@local/hashql/eval/tests/ui/postgres/entity-type-ids-lateral.aux.mir new file mode 100644 index 00000000000..2c63fe041d6 --- /dev/null +++ b/libs/@local/hashql/eval/tests/ui/postgres/entity-type-ids-lateral.aux.mir @@ -0,0 +1,50 @@ +thunk {thunk#1}() -> ::graph::TimeAxis { + let %0: ::graph::TimeAxis + + bb0(): { + %0 = input LOAD time_axis + + return %0 + } +} + +thunk {thunk#2}() -> List<::graph::types::ontology::VersionedUrl> { + let %0: List<::graph::types::ontology::VersionedUrl> + + bb0(): { + %0 = input LOAD expected_types + + return %0 + } +} + +fn {graph::read::filter@7}(%0: (), %1: ::graph::types::knowledge::entity::Entity) -> Boolean { + let %2: Boolean + let %3: List<::graph::types::ontology::VersionedUrl> + + bb0(): { // postgres + %3 = input LOAD expected_types + %2 = %1.metadata.entity_type_ids == %3 + + return %2 + } +} + +thunk {thunk#4}() -> List<::graph::types::knowledge::entity::Entity> { + let %0: List<::graph::types::knowledge::entity::Entity> + let %1: () + let %2: ::graph::TimeAxis + + bb0(): { + %2 = input LOAD time_axis + %1 = () + + graph read entities(%2) + |> filter({graph::read::filter@7}, %1) + |> collect -> bb1(_) + } + + bb1(%0): { + return %0 + } +} \ No newline at end of file diff --git a/libs/@local/hashql/eval/tests/ui/postgres/entity-type-ids-lateral.jsonc b/libs/@local/hashql/eval/tests/ui/postgres/entity-type-ids-lateral.jsonc new file mode 100644 index 00000000000..bb644afcc21 --- /dev/null +++ b/libs/@local/hashql/eval/tests/ui/postgres/entity-type-ids-lateral.jsonc @@ -0,0 +1,14 @@ +//@ run: pass +//@ description: Entity type IDs access triggers LEFT JOIN LATERAL subquery with unnest + jsonb_agg +// biome-ignore format: readability +["::graph::tail::collect", + ["::graph::body::filter", + ["::graph::head::entities", ["input", "time_axis", "_"]], + ["fn", { "#tuple": [] }, { "#struct": { "vertex": "_" } }, "_", + ["==", + "vertex.metadata.entity_type_ids", + ["input", "expected_types", 
"List<::graph::types::ontology::VersionedUrl>"] + ] + ] + ] +] diff --git a/libs/@local/hashql/eval/tests/ui/postgres/entity-type-ids-lateral.stdout b/libs/@local/hashql/eval/tests/ui/postgres/entity-type-ids-lateral.stdout new file mode 100644 index 00000000000..96bf4b6b865 --- /dev/null +++ b/libs/@local/hashql/eval/tests/ui/postgres/entity-type-ids-lateral.stdout @@ -0,0 +1,20 @@ +════ SQL ═══════════════════════════════════════════════════════════════════════ + +SELECT ("continuation_2_0"."row")."block" AS "continuation_2_0_block", ("continuation_2_0"."row")."locals" AS "continuation_2_0_locals", ("continuation_2_0"."row")."values" AS "continuation_2_0_values" +FROM "entity_temporal_metadata" AS "entity_temporal_metadata_0_0_0" +LEFT OUTER JOIN LATERAL (SELECT jsonb_agg(jsonb_build_object($3, "b", $4, "v")) AS "entity_type_ids" +FROM "entity_is_of_type_ids" AS "eit" +CROSS JOIN UNNEST("eit"."base_urls", "eit"."versions") AS "u"("b", "v") +WHERE "eit"."entity_edition_id" = "entity_temporal_metadata_0_0_0"."entity_edition_id") AS "entity_is_of_type_ids_0_0_1" + ON TRUE +CROSS JOIN LATERAL (SELECT (ROW("entity_is_of_type_ids_0_0_1"."entity_type_ids" = $2, NULL, NULL, NULL)::continuation) AS "row" +OFFSET 0) AS "continuation_2_0" +WHERE "entity_temporal_metadata_0_0_0"."transaction_time" && $0 AND "entity_temporal_metadata_0_0_0"."decision_time" && $1 AND ("continuation_2_0"."row")."filter" IS NOT FALSE + +════ Parameters ════════════════════════════════════════════════════════════════ + +$0: TemporalAxis(Transaction) +$1: TemporalAxis(Decision) +$2: Input(expected_types) +$3: Symbol(base_url) +$4: Symbol(version) diff --git a/libs/@local/hashql/eval/tests/ui/postgres/entity-uuid-equality.aux.mir b/libs/@local/hashql/eval/tests/ui/postgres/entity-uuid-equality.aux.mir new file mode 100644 index 00000000000..8c05e0ee2bc --- /dev/null +++ b/libs/@local/hashql/eval/tests/ui/postgres/entity-uuid-equality.aux.mir @@ -0,0 +1,104 @@ +thunk {thunk#1}() -> ::graph::TimeAxis 
{ + let %0: ::graph::TimeAxis + + bb0(): { + %0 = input LOAD time_axis + + return %0 + } +} + +fn {ctor#::core::uuid::Uuid}(%0: (), %1: String) -> ::core::uuid::Uuid { + let %2: ::core::uuid::Uuid + + bb0(): { + %2 = opaque(::core::uuid::Uuid, %1) + + return %2 + } +} + +thunk {thunk#2}() -> (String) -> ::core::uuid::Uuid { + let %0: (String) -> ::core::uuid::Uuid + + bb0(): { + %0 = closure(({ctor#::core::uuid::Uuid} as FnPtr), ()) + + return %0 + } +} + +thunk {thunk#3}() -> ::core::uuid::Uuid { + let %0: ::core::uuid::Uuid + + bb0(): { + %0 = opaque(::core::uuid::Uuid, "e2851dbb-7376-4959-9bca-f72cafc4448f") + + return %0 + } +} + +fn {ctor#::graph::types::knowledge::entity::EntityUuid}(%0: (), %1: ::core::uuid::Uuid) -> ::graph::types::knowledge::entity::EntityUuid { + let %2: ::graph::types::knowledge::entity::EntityUuid + + bb0(): { + %2 = opaque(::graph::types::knowledge::entity::EntityUuid, %1) + + return %2 + } +} + +thunk {thunk#4}() -> (::core::uuid::Uuid) -> ::graph::types::knowledge::entity::EntityUuid { + let %0: (::core::uuid::Uuid) -> ::graph::types::knowledge::entity::EntityUuid + + bb0(): { + %0 = closure(({ctor#::graph::types::knowledge::entity::EntityUuid} as FnPtr), ()) + + return %0 + } +} + +thunk {thunk#5}() -> ::graph::types::knowledge::entity::EntityUuid { + let %0: ::core::uuid::Uuid + let %1: ::graph::types::knowledge::entity::EntityUuid + + bb0(): { + %0 = opaque(::core::uuid::Uuid, "e2851dbb-7376-4959-9bca-f72cafc4448f") + %1 = opaque(::graph::types::knowledge::entity::EntityUuid, %0) + + return %1 + } +} + +fn {graph::read::filter@7}(%0: (), %1: ::graph::types::knowledge::entity::Entity) -> Boolean { + let %2: Boolean + let %3: ::core::uuid::Uuid + let %4: ::graph::types::knowledge::entity::EntityUuid + + bb0(): { // postgres + %3 = opaque(::core::uuid::Uuid, "e2851dbb-7376-4959-9bca-f72cafc4448f") + %4 = opaque(::graph::types::knowledge::entity::EntityUuid, %3) + %2 = %1.metadata.record_id.entity_id.entity_uuid == %4 + + return %2 + 
} +} + +thunk {thunk#7}() -> List<::graph::types::knowledge::entity::Entity> { + let %0: List<::graph::types::knowledge::entity::Entity> + let %1: () + let %2: ::graph::TimeAxis + + bb0(): { + %2 = input LOAD time_axis + %1 = () + + graph read entities(%2) + |> filter({graph::read::filter@7}, %1) + |> collect -> bb1(_) + } + + bb1(%0): { + return %0 + } +} \ No newline at end of file diff --git a/libs/@local/hashql/eval/tests/ui/postgres/entity-uuid-equality.jsonc b/libs/@local/hashql/eval/tests/ui/postgres/entity-uuid-equality.jsonc new file mode 100644 index 00000000000..9f85cfe0c01 --- /dev/null +++ b/libs/@local/hashql/eval/tests/ui/postgres/entity-uuid-equality.jsonc @@ -0,0 +1,16 @@ +//@ run: pass +//@ description: Entity UUID equality - entity path resolution + comparison without cast +// biome-ignore format: readability +["::graph::tail::collect", + ["::graph::body::filter", + ["::graph::head::entities", ["input", "time_axis", "_"]], + ["fn", { "#tuple": [] }, { "#struct": { "vertex": "_" } }, "_", + ["==", + "vertex.metadata.record_id.entity_id.entity_uuid", + ["::graph::types::knowledge::entity::EntityUuid", + ["::core::uuid::Uuid", { "#literal": "e2851dbb-7376-4959-9bca-f72cafc4448f" }] + ] + ] + ] + ] +] diff --git a/libs/@local/hashql/eval/tests/ui/postgres/entity-uuid-equality.stdout b/libs/@local/hashql/eval/tests/ui/postgres/entity-uuid-equality.stdout new file mode 100644 index 00000000000..56444169f78 --- /dev/null +++ b/libs/@local/hashql/eval/tests/ui/postgres/entity-uuid-equality.stdout @@ -0,0 +1,13 @@ +════ SQL ═══════════════════════════════════════════════════════════════════════ + +SELECT ("continuation_7_0"."row")."block" AS "continuation_7_0_block", ("continuation_7_0"."row")."locals" AS "continuation_7_0_locals", ("continuation_7_0"."row")."values" AS "continuation_7_0_values" +FROM "entity_temporal_metadata" AS "entity_temporal_metadata_0_0_0" +CROSS JOIN LATERAL (SELECT (ROW("entity_temporal_metadata_0_0_0"."entity_uuid" = $2, NULL, 
NULL, NULL)::continuation) AS "row" +OFFSET 0) AS "continuation_7_0" +WHERE "entity_temporal_metadata_0_0_0"."transaction_time" && $0 AND "entity_temporal_metadata_0_0_0"."decision_time" && $1 AND ("continuation_7_0"."row")."filter" IS NOT FALSE + +════ Parameters ════════════════════════════════════════════════════════════════ + +$0: TemporalAxis(Transaction) +$1: TemporalAxis(Decision) +$2: Primitive("e2851dbb-7376-4959-9bca-f72cafc4448f") diff --git a/libs/@local/hashql/eval/tests/ui/postgres/entity-web-id-equality.aux.mir b/libs/@local/hashql/eval/tests/ui/postgres/entity-web-id-equality.aux.mir new file mode 100644 index 00000000000..6ebcab406ec --- /dev/null +++ b/libs/@local/hashql/eval/tests/ui/postgres/entity-web-id-equality.aux.mir @@ -0,0 +1,50 @@ +thunk {thunk#1}() -> ::graph::TimeAxis { + let %0: ::graph::TimeAxis + + bb0(): { + %0 = input LOAD time_axis + + return %0 + } +} + +thunk {thunk#2}() -> ::graph::types::principal::actor_group::web::WebId { + let %0: ::graph::types::principal::actor_group::web::WebId + + bb0(): { + %0 = input LOAD web + + return %0 + } +} + +fn {graph::read::filter@7}(%0: (), %1: ::graph::types::knowledge::entity::Entity) -> Boolean { + let %2: Boolean + let %3: ::graph::types::principal::actor_group::web::WebId + + bb0(): { // postgres + %3 = input LOAD web + %2 = %1.metadata.record_id.entity_id.web_id == %3 + + return %2 + } +} + +thunk {thunk#4}() -> List<::graph::types::knowledge::entity::Entity> { + let %0: List<::graph::types::knowledge::entity::Entity> + let %1: () + let %2: ::graph::TimeAxis + + bb0(): { + %2 = input LOAD time_axis + %1 = () + + graph read entities(%2) + |> filter({graph::read::filter@7}, %1) + |> collect -> bb1(_) + } + + bb1(%0): { + return %0 + } +} \ No newline at end of file diff --git a/libs/@local/hashql/eval/tests/ui/postgres/entity-web-id-equality.jsonc b/libs/@local/hashql/eval/tests/ui/postgres/entity-web-id-equality.jsonc new file mode 100644 index 00000000000..aefc138f4c8 --- /dev/null 
+++ b/libs/@local/hashql/eval/tests/ui/postgres/entity-web-id-equality.jsonc @@ -0,0 +1,14 @@ +//@ run: pass +//@ description: Entity WebId equality - entity path resolution for web_id column +// biome-ignore format: readability +["::graph::tail::collect", + ["::graph::body::filter", + ["::graph::head::entities", ["input", "time_axis", "_"]], + ["fn", { "#tuple": [] }, { "#struct": { "vertex": "_" } }, "_", + ["==", + "vertex.metadata.record_id.entity_id.web_id", + ["input", "web", "::graph::types::principal::actor_group::web::WebId"] + ] + ] + ] +] diff --git a/libs/@local/hashql/eval/tests/ui/postgres/entity-web-id-equality.stdout b/libs/@local/hashql/eval/tests/ui/postgres/entity-web-id-equality.stdout new file mode 100644 index 00000000000..b96be9a8563 --- /dev/null +++ b/libs/@local/hashql/eval/tests/ui/postgres/entity-web-id-equality.stdout @@ -0,0 +1,13 @@ +════ SQL ═══════════════════════════════════════════════════════════════════════ + +SELECT ("continuation_2_0"."row")."block" AS "continuation_2_0_block", ("continuation_2_0"."row")."locals" AS "continuation_2_0_locals", ("continuation_2_0"."row")."values" AS "continuation_2_0_values" +FROM "entity_temporal_metadata" AS "entity_temporal_metadata_0_0_0" +CROSS JOIN LATERAL (SELECT (ROW("entity_temporal_metadata_0_0_0"."web_id" = $2, NULL, NULL, NULL)::continuation) AS "row" +OFFSET 0) AS "continuation_2_0" +WHERE "entity_temporal_metadata_0_0_0"."transaction_time" && $0 AND "entity_temporal_metadata_0_0_0"."decision_time" && $1 AND ("continuation_2_0"."row")."filter" IS NOT FALSE + +════ Parameters ════════════════════════════════════════════════════════════════ + +$0: TemporalAxis(Transaction) +$1: TemporalAxis(Decision) +$2: Input(web) diff --git a/libs/@local/hashql/eval/tests/ui/postgres/env-captured-variable.aux.mir b/libs/@local/hashql/eval/tests/ui/postgres/env-captured-variable.aux.mir new file mode 100644 index 00000000000..5af838ddd8a --- /dev/null +++ 
b/libs/@local/hashql/eval/tests/ui/postgres/env-captured-variable.aux.mir @@ -0,0 +1,36 @@ +fn {graph::read::filter@11}(%0: (::graph::types::knowledge::entity::EntityUuid,), %1: ::graph::types::knowledge::entity::Entity) -> Boolean { + let %2: Boolean + + bb0(): { // postgres + %2 = %1.metadata.record_id.entity_id.entity_uuid == %0.0 + + return %2 + } +} + +fn {graph::read::filter@27}(%0: (), %1: ::graph::types::knowledge::entity::Entity) -> Boolean { + bb0(): { // interpreter + return 1 + } +} + +thunk {thunk#8}() -> List<::graph::types::knowledge::entity::Entity> { + let %0: ::graph::types::knowledge::entity::EntityUuid + let %1: ::graph::TimeAxis + let %2: List<::graph::types::knowledge::entity::Entity> + let %3: (::graph::types::knowledge::entity::EntityUuid,) + + bb0(): { + %0 = input LOAD target + %1 = input LOAD time_axis + %3 = (%0) + + graph read entities(%1) + |> filter({graph::read::filter@11}, %3) + |> collect -> bb1(_) + } + + bb1(%2): { + return %2 + } +} \ No newline at end of file diff --git a/libs/@local/hashql/eval/tests/ui/postgres/env-captured-variable.jsonc b/libs/@local/hashql/eval/tests/ui/postgres/env-captured-variable.jsonc new file mode 100644 index 00000000000..b841c80d1aa --- /dev/null +++ b/libs/@local/hashql/eval/tests/ui/postgres/env-captured-variable.jsonc @@ -0,0 +1,27 @@ +//@ run: pass +//@ description: Captured outer variable becomes env field access → Env parameter $N +// biome-ignore format: readability +["if", { "#literal": true }, + ["let", "target_uuid", + ["input", "target", "::graph::types::knowledge::entity::EntityUuid"], + ["::graph::tail::collect", + ["::graph::body::filter", + ["::graph::head::entities", ["input", "time_axis", "_"]], + ["fn", { "#tuple": [] }, { "#struct": { "vertex": "_" } }, "_", + ["==", + "vertex.metadata.record_id.entity_id.entity_uuid", + "target_uuid" + ] + ] + ] + ] + ], + ["::graph::tail::collect", + ["::graph::body::filter", + ["::graph::head::entities", ["input", "time_axis", "_"]], + ["fn", 
{ "#tuple": [] }, { "#struct": { "vertex": "_" } }, "_", + { "#literal": true } + ] + ] + ] +] diff --git a/libs/@local/hashql/eval/tests/ui/postgres/env-captured-variable.stdout b/libs/@local/hashql/eval/tests/ui/postgres/env-captured-variable.stdout new file mode 100644 index 00000000000..919229b1639 --- /dev/null +++ b/libs/@local/hashql/eval/tests/ui/postgres/env-captured-variable.stdout @@ -0,0 +1,13 @@ +════ SQL ═══════════════════════════════════════════════════════════════════════ + +SELECT ("continuation_0_0"."row")."block" AS "continuation_0_0_block", ("continuation_0_0"."row")."locals" AS "continuation_0_0_locals", ("continuation_0_0"."row")."values" AS "continuation_0_0_values" +FROM "entity_temporal_metadata" AS "entity_temporal_metadata_0_0_0" +CROSS JOIN LATERAL (SELECT (ROW("entity_temporal_metadata_0_0_0"."entity_uuid" = $2, NULL, NULL, NULL)::continuation) AS "row" +OFFSET 0) AS "continuation_0_0" +WHERE "entity_temporal_metadata_0_0_0"."transaction_time" && $0 AND "entity_temporal_metadata_0_0_0"."decision_time" && $1 AND ("continuation_0_0"."row")."filter" IS NOT FALSE + +════ Parameters ════════════════════════════════════════════════════════════════ + +$0: TemporalAxis(Transaction) +$1: TemporalAxis(Decision) +$2: Env(#0) diff --git a/libs/@local/hashql/eval/tests/ui/postgres/filter/binary_bitand_bigint_cast.snap b/libs/@local/hashql/eval/tests/ui/postgres/filter/binary_bitand_bigint_cast.snap new file mode 100644 index 00000000000..1c20e927e11 --- /dev/null +++ b/libs/@local/hashql/eval/tests/ui/postgres/filter/binary_bitand_bigint_cast.snap @@ -0,0 +1,7 @@ +--- +source: libs/@local/hashql/eval/src/postgres/filter/tests.rs +expression: report.to_string() +--- +==================== Island (entry: bb0, target: postgres) ===================== + +(ROW((($0)::bigint) & (($1)::bigint), NULL, NULL, NULL)::continuation) diff --git a/libs/@local/hashql/eval/tests/ui/postgres/filter/binary_sub_numeric_cast.snap 
b/libs/@local/hashql/eval/tests/ui/postgres/filter/binary_sub_numeric_cast.snap new file mode 100644 index 00000000000..39c16eff221 --- /dev/null +++ b/libs/@local/hashql/eval/tests/ui/postgres/filter/binary_sub_numeric_cast.snap @@ -0,0 +1,7 @@ +--- +source: libs/@local/hashql/eval/src/postgres/filter/tests.rs +expression: report.to_string() +--- +==================== Island (entry: bb0, target: postgres) ===================== + +(ROW((($0)::numeric) - (($1)::numeric), NULL, NULL, NULL)::continuation) diff --git a/libs/@local/hashql/eval/tests/ui/postgres/filter/data_island_provides_without_lateral.snap b/libs/@local/hashql/eval/tests/ui/postgres/filter/data_island_provides_without_lateral.snap new file mode 100644 index 00000000000..975503ea588 --- /dev/null +++ b/libs/@local/hashql/eval/tests/ui/postgres/filter/data_island_provides_without_lateral.snap @@ -0,0 +1,37 @@ +--- +source: libs/@local/hashql/eval/src/postgres/filter/tests.rs +expression: query_report.to_string() +--- +===================================== SQL ====================================== + +SELECT "entity_editions_0_0_1"."properties" AS "properties", jsonb_build_object($2, jsonb_build_object($3, "entity_temporal_metadata_0_0_0"."web_id", $4, "entity_temporal_metadata_0_0_0"."entity_uuid", $5, "entity_temporal_metadata_0_0_0"."draft_id"), $5, "entity_temporal_metadata_0_0_0"."draft_id") AS "record_id", jsonb_build_object($6, "entity_temporal_metadata_0_0_0"."decision_time", $7, "entity_temporal_metadata_0_0_0"."transaction_time") AS "temporal_versioning", "entity_is_of_type_ids_0_0_2"."entity_type_ids" AS "entity_type_ids", "entity_editions_0_0_1"."archived" AS "archived", "entity_editions_0_0_1"."confidence" AS "confidence", "entity_ids_0_0_3"."provenance" AS "provenance_inferred", "entity_editions_0_0_1"."provenance" AS "provenance_edition", "entity_editions_0_0_1"."property_metadata" AS "property_metadata", "entity_has_left_entity_0_0_4"."left_web_id" AS "left_entity_web_id", 
"entity_has_left_entity_0_0_4"."left_entity_uuid" AS "left_entity_uuid", "entity_has_right_entity_0_0_5"."right_web_id" AS "right_entity_web_id", "entity_has_right_entity_0_0_5"."right_entity_uuid" AS "right_entity_uuid", "entity_has_left_entity_0_0_4"."confidence" AS "left_entity_confidence", "entity_has_right_entity_0_0_5"."confidence" AS "right_entity_confidence", "entity_has_left_entity_0_0_4"."provenance" AS "left_entity_provenance", "entity_has_right_entity_0_0_5"."provenance" AS "right_entity_provenance" +FROM "entity_temporal_metadata" AS "entity_temporal_metadata_0_0_0" +INNER JOIN "entity_editions" AS "entity_editions_0_0_1" + ON "entity_editions_0_0_1"."entity_edition_id" = "entity_temporal_metadata_0_0_0"."entity_edition_id" +INNER JOIN "entity_ids" AS "entity_ids_0_0_3" + ON "entity_ids_0_0_3"."web_id" = "entity_temporal_metadata_0_0_0"."web_id" + AND "entity_ids_0_0_3"."entity_uuid" = "entity_temporal_metadata_0_0_0"."entity_uuid" +LEFT OUTER JOIN LATERAL (SELECT jsonb_agg(jsonb_build_object($8, "b", $9, "v")) AS "entity_type_ids" +FROM "entity_is_of_type_ids" AS "eit" +CROSS JOIN UNNEST("eit"."base_urls", "eit"."versions") AS "u"("b", "v") +WHERE "eit"."entity_edition_id" = "entity_temporal_metadata_0_0_0"."entity_edition_id") AS "entity_is_of_type_ids_0_0_2" + ON TRUE +LEFT OUTER JOIN "entity_has_left_entity" AS "entity_has_left_entity_0_0_4" + ON "entity_has_left_entity_0_0_4"."web_id" = "entity_temporal_metadata_0_0_0"."web_id" + AND "entity_has_left_entity_0_0_4"."entity_uuid" = "entity_temporal_metadata_0_0_0"."entity_uuid" +LEFT OUTER JOIN "entity_has_right_entity" AS "entity_has_right_entity_0_0_5" + ON "entity_has_right_entity_0_0_5"."web_id" = "entity_temporal_metadata_0_0_0"."web_id" + AND "entity_has_right_entity_0_0_5"."entity_uuid" = "entity_temporal_metadata_0_0_0"."entity_uuid" +WHERE "entity_temporal_metadata_0_0_0"."transaction_time" && $0 AND "entity_temporal_metadata_0_0_0"."decision_time" && $1 +================================== 
Parameters ================================== + +$0: TemporalAxis(Transaction) +$1: TemporalAxis(Decision) +$2: Symbol(entity_id) +$3: Symbol(web_id) +$4: Symbol(entity_uuid) +$5: Symbol(draft_id) +$6: Symbol(decision_time) +$7: Symbol(transaction_time) +$8: Symbol(base_url) +$9: Symbol(version) diff --git a/libs/@local/hashql/eval/tests/ui/postgres/filter/diamond_cfg_merge.snap b/libs/@local/hashql/eval/tests/ui/postgres/filter/diamond_cfg_merge.snap new file mode 100644 index 00000000000..1e9105a703b --- /dev/null +++ b/libs/@local/hashql/eval/tests/ui/postgres/filter/diamond_cfg_merge.snap @@ -0,0 +1,7 @@ +--- +source: libs/@local/hashql/eval/src/postgres/filter/tests.rs +expression: report.to_string() +--- +==================== Island (entry: bb0, target: postgres) ===================== + +CASE WHEN (($0)::int) = 0 THEN (ROW(0, NULL, NULL, NULL)::continuation) WHEN (($0)::int) = 1 THEN (ROW(1, NULL, NULL, NULL)::continuation) END diff --git a/libs/@local/hashql/eval/tests/ui/postgres/filter/dynamic_index_projection.snap b/libs/@local/hashql/eval/tests/ui/postgres/filter/dynamic_index_projection.snap new file mode 100644 index 00000000000..090042496c2 --- /dev/null +++ b/libs/@local/hashql/eval/tests/ui/postgres/filter/dynamic_index_projection.snap @@ -0,0 +1,7 @@ +--- +source: libs/@local/hashql/eval/src/postgres/filter/tests.rs +expression: report.to_string() +--- +==================== Island (entry: bb0, target: postgres) ===================== + +(ROW(jsonb_extract_path(jsonb_build_array(10, 20, 30), (($0)::text)), NULL, NULL, NULL)::continuation) diff --git a/libs/@local/hashql/eval/tests/ui/postgres/filter/field_by_name_projection.snap b/libs/@local/hashql/eval/tests/ui/postgres/filter/field_by_name_projection.snap new file mode 100644 index 00000000000..86076201ec1 --- /dev/null +++ b/libs/@local/hashql/eval/tests/ui/postgres/filter/field_by_name_projection.snap @@ -0,0 +1,7 @@ +--- +source: libs/@local/hashql/eval/src/postgres/filter/tests.rs +expression: 
report.to_string() +--- +==================== Island (entry: bb0, target: postgres) ===================== + +(ROW(jsonb_extract_path(jsonb_build_object($0, 10, $1, 20), (($0)::text)), NULL, NULL, NULL)::continuation) diff --git a/libs/@local/hashql/eval/tests/ui/postgres/filter/field_index_projection.snap b/libs/@local/hashql/eval/tests/ui/postgres/filter/field_index_projection.snap new file mode 100644 index 00000000000..255ac39acf3 --- /dev/null +++ b/libs/@local/hashql/eval/tests/ui/postgres/filter/field_index_projection.snap @@ -0,0 +1,7 @@ +--- +source: libs/@local/hashql/eval/src/postgres/filter/tests.rs +expression: report.to_string() +--- +==================== Island (entry: bb0, target: postgres) ===================== + +(ROW(jsonb_extract_path(jsonb_build_array(10, 20), ((0)::text)), NULL, NULL, NULL)::continuation) diff --git a/libs/@local/hashql/eval/tests/ui/postgres/filter/island_exit_empty_arrays.snap b/libs/@local/hashql/eval/tests/ui/postgres/filter/island_exit_empty_arrays.snap new file mode 100644 index 00000000000..fb7afe651a2 --- /dev/null +++ b/libs/@local/hashql/eval/tests/ui/postgres/filter/island_exit_empty_arrays.snap @@ -0,0 +1,7 @@ +--- +source: libs/@local/hashql/eval/src/postgres/filter/tests.rs +expression: report.to_string() +--- +==================== Island (entry: bb0, target: postgres) ===================== + +(ROW(NULL, 1, ARRAY[]::int[], ARRAY[]::jsonb[])::continuation) diff --git a/libs/@local/hashql/eval/tests/ui/postgres/filter/island_exit_goto.snap b/libs/@local/hashql/eval/tests/ui/postgres/filter/island_exit_goto.snap new file mode 100644 index 00000000000..f9d10c3670b --- /dev/null +++ b/libs/@local/hashql/eval/tests/ui/postgres/filter/island_exit_goto.snap @@ -0,0 +1,7 @@ +--- +source: libs/@local/hashql/eval/src/postgres/filter/tests.rs +expression: report.to_string() +--- +==================== Island (entry: bb0, target: postgres) ===================== + +(ROW(NULL, 1, ARRAY[8]::int[], ARRAY[jsonb_build_object($0, 
jsonb_build_object($1, "entity_temporal_metadata_0_0_0"."web_id", $2, "entity_temporal_metadata_0_0_0"."entity_uuid", $3, "entity_temporal_metadata_0_0_0"."draft_id"), $3, "entity_temporal_metadata_0_0_0"."draft_id")]::jsonb[])::continuation) diff --git a/libs/@local/hashql/eval/tests/ui/postgres/filter/island_exit_switch_int.snap b/libs/@local/hashql/eval/tests/ui/postgres/filter/island_exit_switch_int.snap new file mode 100644 index 00000000000..0817b4696aa --- /dev/null +++ b/libs/@local/hashql/eval/tests/ui/postgres/filter/island_exit_switch_int.snap @@ -0,0 +1,7 @@ +--- +source: libs/@local/hashql/eval/src/postgres/filter/tests.rs +expression: report.to_string() +--- +==================== Island (entry: bb0, target: postgres) ===================== + +CASE WHEN (($6)::int) = 0 THEN (ROW(NULL, 2, ARRAY[]::int[], ARRAY[]::jsonb[])::continuation) WHEN (($6)::int) = 1 THEN (ROW(1, NULL, NULL, NULL)::continuation) END diff --git a/libs/@local/hashql/eval/tests/ui/postgres/filter/island_exit_with_live_out.snap b/libs/@local/hashql/eval/tests/ui/postgres/filter/island_exit_with_live_out.snap new file mode 100644 index 00000000000..4a054e73d70 --- /dev/null +++ b/libs/@local/hashql/eval/tests/ui/postgres/filter/island_exit_with_live_out.snap @@ -0,0 +1,7 @@ +--- +source: libs/@local/hashql/eval/src/postgres/filter/tests.rs +expression: report.to_string() +--- +==================== Island (entry: bb0, target: postgres) ===================== + +(ROW(NULL, 1, ARRAY[7]::int[], ARRAY[jsonb_build_object($0, jsonb_build_object($1, "entity_temporal_metadata_0_0_0"."web_id", $2, "entity_temporal_metadata_0_0_0"."entity_uuid", $3, "entity_temporal_metadata_0_0_0"."draft_id"), $3, "entity_temporal_metadata_0_0_0"."draft_id")]::jsonb[])::continuation) diff --git a/libs/@local/hashql/eval/tests/ui/postgres/filter/left_entity_filter.snap b/libs/@local/hashql/eval/tests/ui/postgres/filter/left_entity_filter.snap new file mode 100644 index 00000000000..0dacf67c35d --- /dev/null +++ 
b/libs/@local/hashql/eval/tests/ui/postgres/filter/left_entity_filter.snap @@ -0,0 +1,7 @@ +--- +source: libs/@local/hashql/eval/src/postgres/filter/tests.rs +expression: report.to_string() +--- +==================== Island (entry: bb0, target: postgres) ===================== + +(ROW("entity_has_left_entity_0_0_1"."left_entity_uuid" = $0, NULL, NULL, NULL)::continuation) diff --git a/libs/@local/hashql/eval/tests/ui/postgres/filter/nested_property_access.snap b/libs/@local/hashql/eval/tests/ui/postgres/filter/nested_property_access.snap new file mode 100644 index 00000000000..26e1b17745d --- /dev/null +++ b/libs/@local/hashql/eval/tests/ui/postgres/filter/nested_property_access.snap @@ -0,0 +1,7 @@ +--- +source: libs/@local/hashql/eval/src/postgres/filter/tests.rs +expression: report.to_string() +--- +==================== Island (entry: bb0, target: postgres) ===================== + +(ROW(jsonb_extract_path("entity_editions_0_0_1"."properties", (($0)::text), (($1)::text)) = $2, NULL, NULL, NULL)::continuation) diff --git a/libs/@local/hashql/eval/tests/ui/postgres/filter/property_field_equality.snap b/libs/@local/hashql/eval/tests/ui/postgres/filter/property_field_equality.snap new file mode 100644 index 00000000000..1d66026777a --- /dev/null +++ b/libs/@local/hashql/eval/tests/ui/postgres/filter/property_field_equality.snap @@ -0,0 +1,7 @@ +--- +source: libs/@local/hashql/eval/src/postgres/filter/tests.rs +expression: report.to_string() +--- +==================== Island (entry: bb0, target: postgres) ===================== + +(ROW(jsonb_extract_path("entity_editions_0_0_1"."properties", (($0)::text)) = $1, NULL, NULL, NULL)::continuation) diff --git a/libs/@local/hashql/eval/tests/ui/postgres/filter/property_mask.snap b/libs/@local/hashql/eval/tests/ui/postgres/filter/property_mask.snap new file mode 100644 index 00000000000..fd7c02275f8 --- /dev/null +++ b/libs/@local/hashql/eval/tests/ui/postgres/filter/property_mask.snap @@ -0,0 +1,39 @@ +--- +source: 
libs/@local/hashql/eval/src/postgres/filter/tests.rs +expression: report.to_string() +--- +===================================== SQL ====================================== + +SELECT ("entity_editions_0_0_1"."properties" - $99) AS "properties", jsonb_build_object($2, jsonb_build_object($3, "entity_temporal_metadata_0_0_0"."web_id", $4, "entity_temporal_metadata_0_0_0"."entity_uuid", $5, "entity_temporal_metadata_0_0_0"."draft_id"), $5, "entity_temporal_metadata_0_0_0"."draft_id") AS "record_id", jsonb_build_object($6, "entity_temporal_metadata_0_0_0"."decision_time", $7, "entity_temporal_metadata_0_0_0"."transaction_time") AS "temporal_versioning", "entity_is_of_type_ids_0_0_2"."entity_type_ids" AS "entity_type_ids", "entity_editions_0_0_1"."archived" AS "archived", "entity_editions_0_0_1"."confidence" AS "confidence", "entity_ids_0_0_3"."provenance" AS "provenance_inferred", "entity_editions_0_0_1"."provenance" AS "provenance_edition", ("entity_editions_0_0_1"."property_metadata" - $99) AS "property_metadata", "entity_has_left_entity_0_0_4"."left_web_id" AS "left_entity_web_id", "entity_has_left_entity_0_0_4"."left_entity_uuid" AS "left_entity_uuid", "entity_has_right_entity_0_0_5"."right_web_id" AS "right_entity_web_id", "entity_has_right_entity_0_0_5"."right_entity_uuid" AS "right_entity_uuid", "entity_has_left_entity_0_0_4"."confidence" AS "left_entity_confidence", "entity_has_right_entity_0_0_5"."confidence" AS "right_entity_confidence", "entity_has_left_entity_0_0_4"."provenance" AS "left_entity_provenance", "entity_has_right_entity_0_0_5"."provenance" AS "right_entity_provenance", ("continuation_0_0"."row")."block" AS "continuation_0_0_block", ("continuation_0_0"."row")."locals" AS "continuation_0_0_locals", ("continuation_0_0"."row")."values" AS "continuation_0_0_values" +FROM "entity_temporal_metadata" AS "entity_temporal_metadata_0_0_0" +INNER JOIN "entity_editions" AS "entity_editions_0_0_1" + ON "entity_editions_0_0_1"."entity_edition_id" = 
"entity_temporal_metadata_0_0_0"."entity_edition_id" +INNER JOIN "entity_ids" AS "entity_ids_0_0_3" + ON "entity_ids_0_0_3"."web_id" = "entity_temporal_metadata_0_0_0"."web_id" + AND "entity_ids_0_0_3"."entity_uuid" = "entity_temporal_metadata_0_0_0"."entity_uuid" +LEFT OUTER JOIN LATERAL (SELECT jsonb_agg(jsonb_build_object($8, "b", $9, "v")) AS "entity_type_ids" +FROM "entity_is_of_type_ids" AS "eit" +CROSS JOIN UNNEST("eit"."base_urls", "eit"."versions") AS "u"("b", "v") +WHERE "eit"."entity_edition_id" = "entity_temporal_metadata_0_0_0"."entity_edition_id") AS "entity_is_of_type_ids_0_0_2" + ON TRUE +LEFT OUTER JOIN "entity_has_left_entity" AS "entity_has_left_entity_0_0_4" + ON "entity_has_left_entity_0_0_4"."web_id" = "entity_temporal_metadata_0_0_0"."web_id" + AND "entity_has_left_entity_0_0_4"."entity_uuid" = "entity_temporal_metadata_0_0_0"."entity_uuid" +LEFT OUTER JOIN "entity_has_right_entity" AS "entity_has_right_entity_0_0_5" + ON "entity_has_right_entity_0_0_5"."web_id" = "entity_temporal_metadata_0_0_0"."web_id" + AND "entity_has_right_entity_0_0_5"."entity_uuid" = "entity_temporal_metadata_0_0_0"."entity_uuid" +CROSS JOIN LATERAL (SELECT (ROW(NULL, 1, ARRAY[]::int[], ARRAY[]::jsonb[])::continuation) AS "row" +OFFSET 0) AS "continuation_0_0" +WHERE "entity_temporal_metadata_0_0_0"."transaction_time" && $0 AND "entity_temporal_metadata_0_0_0"."decision_time" && $1 AND ("continuation_0_0"."row")."filter" IS NOT FALSE +================================== Parameters ================================== + +$0: TemporalAxis(Transaction) +$1: TemporalAxis(Decision) +$2: Symbol(entity_id) +$3: Symbol(web_id) +$4: Symbol(entity_uuid) +$5: Symbol(draft_id) +$6: Symbol(decision_time) +$7: Symbol(transaction_time) +$8: Symbol(base_url) +$9: Symbol(version) diff --git a/libs/@local/hashql/eval/tests/ui/postgres/filter/provides_drives_select_and_joins.snap b/libs/@local/hashql/eval/tests/ui/postgres/filter/provides_drives_select_and_joins.snap new file mode 100644 
index 00000000000..81c22678cea --- /dev/null +++ b/libs/@local/hashql/eval/tests/ui/postgres/filter/provides_drives_select_and_joins.snap @@ -0,0 +1,37 @@ +--- +source: libs/@local/hashql/eval/src/postgres/filter/tests.rs +expression: report.to_string() +--- +===================================== SQL ====================================== + +SELECT "entity_editions_0_0_1"."properties" AS "properties", jsonb_build_object($2, jsonb_build_object($3, "entity_temporal_metadata_0_0_0"."web_id", $4, "entity_temporal_metadata_0_0_0"."entity_uuid", $5, "entity_temporal_metadata_0_0_0"."draft_id"), $5, "entity_temporal_metadata_0_0_0"."draft_id") AS "record_id", jsonb_build_object($6, "entity_temporal_metadata_0_0_0"."decision_time", $7, "entity_temporal_metadata_0_0_0"."transaction_time") AS "temporal_versioning", "entity_is_of_type_ids_0_0_2"."entity_type_ids" AS "entity_type_ids", "entity_editions_0_0_1"."archived" AS "archived", "entity_editions_0_0_1"."confidence" AS "confidence", "entity_ids_0_0_3"."provenance" AS "provenance_inferred", "entity_editions_0_0_1"."provenance" AS "provenance_edition", "entity_editions_0_0_1"."property_metadata" AS "property_metadata", "entity_has_left_entity_0_0_4"."left_web_id" AS "left_entity_web_id", "entity_has_left_entity_0_0_4"."left_entity_uuid" AS "left_entity_uuid", "entity_has_right_entity_0_0_5"."right_web_id" AS "right_entity_web_id", "entity_has_right_entity_0_0_5"."right_entity_uuid" AS "right_entity_uuid", "entity_has_left_entity_0_0_4"."confidence" AS "left_entity_confidence", "entity_has_right_entity_0_0_5"."confidence" AS "right_entity_confidence", "entity_has_left_entity_0_0_4"."provenance" AS "left_entity_provenance", "entity_has_right_entity_0_0_5"."provenance" AS "right_entity_provenance" +FROM "entity_temporal_metadata" AS "entity_temporal_metadata_0_0_0" +INNER JOIN "entity_editions" AS "entity_editions_0_0_1" + ON "entity_editions_0_0_1"."entity_edition_id" = "entity_temporal_metadata_0_0_0"."entity_edition_id" 
+INNER JOIN "entity_ids" AS "entity_ids_0_0_3" + ON "entity_ids_0_0_3"."web_id" = "entity_temporal_metadata_0_0_0"."web_id" + AND "entity_ids_0_0_3"."entity_uuid" = "entity_temporal_metadata_0_0_0"."entity_uuid" +LEFT OUTER JOIN LATERAL (SELECT jsonb_agg(jsonb_build_object($8, "b", $9, "v")) AS "entity_type_ids" +FROM "entity_is_of_type_ids" AS "eit" +CROSS JOIN UNNEST("eit"."base_urls", "eit"."versions") AS "u"("b", "v") +WHERE "eit"."entity_edition_id" = "entity_temporal_metadata_0_0_0"."entity_edition_id") AS "entity_is_of_type_ids_0_0_2" + ON TRUE +LEFT OUTER JOIN "entity_has_left_entity" AS "entity_has_left_entity_0_0_4" + ON "entity_has_left_entity_0_0_4"."web_id" = "entity_temporal_metadata_0_0_0"."web_id" + AND "entity_has_left_entity_0_0_4"."entity_uuid" = "entity_temporal_metadata_0_0_0"."entity_uuid" +LEFT OUTER JOIN "entity_has_right_entity" AS "entity_has_right_entity_0_0_5" + ON "entity_has_right_entity_0_0_5"."web_id" = "entity_temporal_metadata_0_0_0"."web_id" + AND "entity_has_right_entity_0_0_5"."entity_uuid" = "entity_temporal_metadata_0_0_0"."entity_uuid" +WHERE "entity_temporal_metadata_0_0_0"."transaction_time" && $0 AND "entity_temporal_metadata_0_0_0"."decision_time" && $1 +================================== Parameters ================================== + +$0: TemporalAxis(Transaction) +$1: TemporalAxis(Decision) +$2: Symbol(entity_id) +$3: Symbol(web_id) +$4: Symbol(entity_uuid) +$5: Symbol(draft_id) +$6: Symbol(decision_time) +$7: Symbol(transaction_time) +$8: Symbol(base_url) +$9: Symbol(version) diff --git a/libs/@local/hashql/eval/tests/ui/postgres/filter/straight_line_goto_chain.snap b/libs/@local/hashql/eval/tests/ui/postgres/filter/straight_line_goto_chain.snap new file mode 100644 index 00000000000..468494abd36 --- /dev/null +++ b/libs/@local/hashql/eval/tests/ui/postgres/filter/straight_line_goto_chain.snap @@ -0,0 +1,7 @@ +--- +source: libs/@local/hashql/eval/src/postgres/filter/tests.rs +expression: report.to_string() +--- 
+==================== Island (entry: bb0, target: postgres) ===================== + +(ROW($0, NULL, NULL, NULL)::continuation) diff --git a/libs/@local/hashql/eval/tests/ui/postgres/filter/switch_int_many_branches.snap b/libs/@local/hashql/eval/tests/ui/postgres/filter/switch_int_many_branches.snap new file mode 100644 index 00000000000..ec6c8b5269b --- /dev/null +++ b/libs/@local/hashql/eval/tests/ui/postgres/filter/switch_int_many_branches.snap @@ -0,0 +1,7 @@ +--- +source: libs/@local/hashql/eval/src/postgres/filter/tests.rs +expression: report.to_string() +--- +==================== Island (entry: bb0, target: postgres) ===================== + +CASE WHEN (($0)::int) = 0 THEN (ROW(1, NULL, NULL, NULL)::continuation) WHEN (($0)::int) = 1 THEN (ROW(0, NULL, NULL, NULL)::continuation) WHEN (($0)::int) = 2 THEN (ROW(1, NULL, NULL, NULL)::continuation) WHEN (($0)::int) = 3 THEN (ROW(0, NULL, NULL, NULL)::continuation) ELSE (ROW(1, NULL, NULL, NULL)::continuation) END diff --git a/libs/@local/hashql/eval/tests/ui/postgres/filter/unary_bitnot.snap b/libs/@local/hashql/eval/tests/ui/postgres/filter/unary_bitnot.snap new file mode 100644 index 00000000000..c44b14d5fd5 --- /dev/null +++ b/libs/@local/hashql/eval/tests/ui/postgres/filter/unary_bitnot.snap @@ -0,0 +1,7 @@ +--- +source: libs/@local/hashql/eval/src/postgres/filter/tests.rs +expression: report.to_string() +--- +==================== Island (entry: bb0, target: postgres) ===================== + +(ROW(~($0), NULL, NULL, NULL)::continuation) diff --git a/libs/@local/hashql/eval/tests/ui/postgres/filter/unary_neg.snap b/libs/@local/hashql/eval/tests/ui/postgres/filter/unary_neg.snap new file mode 100644 index 00000000000..abbb31c653c --- /dev/null +++ b/libs/@local/hashql/eval/tests/ui/postgres/filter/unary_neg.snap @@ -0,0 +1,7 @@ +--- +source: libs/@local/hashql/eval/src/postgres/filter/tests.rs +expression: report.to_string() +--- +==================== Island (entry: bb0, target: postgres) ===================== + 
+(ROW(-($0), NULL, NULL, NULL)::continuation) diff --git a/libs/@local/hashql/eval/tests/ui/postgres/filter/unary_not.snap b/libs/@local/hashql/eval/tests/ui/postgres/filter/unary_not.snap new file mode 100644 index 00000000000..f2066fb8906 --- /dev/null +++ b/libs/@local/hashql/eval/tests/ui/postgres/filter/unary_not.snap @@ -0,0 +1,7 @@ +--- +source: libs/@local/hashql/eval/src/postgres/filter/tests.rs +expression: report.to_string() +--- +==================== Island (entry: bb0, target: postgres) ===================== + +(ROW(NOT($0), NULL, NULL, NULL)::continuation) diff --git a/libs/@local/hashql/eval/tests/ui/postgres/if-input-branches.aux.mir b/libs/@local/hashql/eval/tests/ui/postgres/if-input-branches.aux.mir new file mode 100644 index 00000000000..d1e8ae0ff3e --- /dev/null +++ b/libs/@local/hashql/eval/tests/ui/postgres/if-input-branches.aux.mir @@ -0,0 +1,66 @@ +thunk {thunk#1}() -> ::graph::TimeAxis { + let %0: ::graph::TimeAxis + + bb0(): { + %0 = input LOAD time_axis + + return %0 + } +} + +thunk {thunk#6}() -> Boolean { + let %0: Boolean + + bb0(): { + %0 = input LOAD flag + + return %0 + } +} + +fn {graph::read::filter@7}(%0: (), %1: ::graph::types::knowledge::entity::Entity) -> Boolean { + let %2: ::graph::types::knowledge::entity::EntityUuid + let %3: Boolean + let %4: ::graph::types::knowledge::entity::EntityUuid + let %5: Boolean + let %6: Boolean + + bb0(): { // postgres + %6 = input LOAD flag + + switchInt(%6) -> [0: bb2(), 1: bb1()] + } + + bb1(): { // postgres + %2 = input LOAD id_a + %3 = %1.metadata.record_id.entity_id.entity_uuid == %2 + + return %3 + } + + bb2(): { // postgres + %4 = input LOAD id_b + %5 = %1.metadata.record_id.entity_id.entity_uuid == %4 + + return %5 + } +} + +thunk {thunk#8}() -> List<::graph::types::knowledge::entity::Entity> { + let %0: List<::graph::types::knowledge::entity::Entity> + let %1: () + let %2: ::graph::TimeAxis + + bb0(): { + %2 = input LOAD time_axis + %1 = () + + graph read entities(%2) + |> 
filter({graph::read::filter@7}, %1) + |> collect -> bb1(_) + } + + bb1(%0): { + return %0 + } +} \ No newline at end of file diff --git a/libs/@local/hashql/eval/tests/ui/postgres/if-input-branches.jsonc b/libs/@local/hashql/eval/tests/ui/postgres/if-input-branches.jsonc new file mode 100644 index 00000000000..39c0b40a3c1 --- /dev/null +++ b/libs/@local/hashql/eval/tests/ui/postgres/if-input-branches.jsonc @@ -0,0 +1,21 @@ +//@ run: pass +//@ description: Runtime branching - input parameter as discriminant forces CASE WHEN in SQL +// biome-ignore format: readability +["::graph::tail::collect", + ["::graph::body::filter", + ["::graph::head::entities", ["input", "time_axis", "_"]], + ["fn", { "#tuple": [] }, { "#struct": { "vertex": "_" } }, "_", + ["if", + ["input", "flag", "Boolean"], + ["==", + "vertex.metadata.record_id.entity_id.entity_uuid", + ["input", "id_a", "::graph::types::knowledge::entity::EntityUuid"] + ], + ["==", + "vertex.metadata.record_id.entity_id.entity_uuid", + ["input", "id_b", "::graph::types::knowledge::entity::EntityUuid"] + ] + ] + ] + ] +] diff --git a/libs/@local/hashql/eval/tests/ui/postgres/if-input-branches.stdout b/libs/@local/hashql/eval/tests/ui/postgres/if-input-branches.stdout new file mode 100644 index 00000000000..8e4bcee5cf2 --- /dev/null +++ b/libs/@local/hashql/eval/tests/ui/postgres/if-input-branches.stdout @@ -0,0 +1,15 @@ +════ SQL ═══════════════════════════════════════════════════════════════════════ + +SELECT ("continuation_2_0"."row")."block" AS "continuation_2_0_block", ("continuation_2_0"."row")."locals" AS "continuation_2_0_locals", ("continuation_2_0"."row")."values" AS "continuation_2_0_values" +FROM "entity_temporal_metadata" AS "entity_temporal_metadata_0_0_0" +CROSS JOIN LATERAL (SELECT CASE WHEN (($2)::int) = 0 THEN (ROW("entity_temporal_metadata_0_0_0"."entity_uuid" = $3, NULL, NULL, NULL)::continuation) WHEN (($2)::int) = 1 THEN (ROW("entity_temporal_metadata_0_0_0"."entity_uuid" = $4, NULL, NULL, 
NULL)::continuation) END AS "row" +OFFSET 0) AS "continuation_2_0" +WHERE "entity_temporal_metadata_0_0_0"."transaction_time" && $0 AND "entity_temporal_metadata_0_0_0"."decision_time" && $1 AND ("continuation_2_0"."row")."filter" IS NOT FALSE + +════ Parameters ════════════════════════════════════════════════════════════════ + +$0: TemporalAxis(Transaction) +$1: TemporalAxis(Decision) +$2: Input(flag) +$3: Input(id_b) +$4: Input(id_a) diff --git a/libs/@local/hashql/eval/tests/ui/postgres/input-parameter-exists.aux.mir b/libs/@local/hashql/eval/tests/ui/postgres/input-parameter-exists.aux.mir new file mode 100644 index 00000000000..702591e451a --- /dev/null +++ b/libs/@local/hashql/eval/tests/ui/postgres/input-parameter-exists.aux.mir @@ -0,0 +1,80 @@ +thunk {thunk#1}() -> ::graph::TimeAxis { + let %0: ::graph::TimeAxis + + bb0(): { + %0 = input LOAD time_axis + + return %0 + } +} + +thunk {thunk#3}() -> Boolean { + let %0: Boolean + + bb0(): { + %0 = input EXISTS optional_flag + + return %0 + } +} + +thunk {thunk#4}() -> Boolean { + let %0: Boolean + let %1: Boolean + + bb0(): { + %1 = input EXISTS optional_flag + + switchInt(%1) -> [0: bb2(), 1: bb1()] + } + + bb1(): { + %0 = input LOAD optional_flag + + return %0 + } + + bb2(): { + return 1 + } +} + +fn {graph::read::filter@7}(%0: (), %1: ::graph::types::knowledge::entity::Entity) -> Boolean { + let %2: Boolean + let %3: Boolean + + bb0(): { // postgres + %3 = input EXISTS optional_flag + + switchInt(%3) -> [0: bb2(), 1: bb1()] + } + + bb1(): { // postgres + %2 = input LOAD optional_flag + + return %2 + } + + bb2(): { // postgres + return 1 + } +} + +thunk {thunk#5}() -> List<::graph::types::knowledge::entity::Entity> { + let %0: List<::graph::types::knowledge::entity::Entity> + let %1: () + let %2: ::graph::TimeAxis + + bb0(): { + %2 = input LOAD time_axis + %1 = () + + graph read entities(%2) + |> filter({graph::read::filter@7}, %1) + |> collect -> bb1(_) + } + + bb1(%0): { + return %0 + } +} \ No newline at 
end of file diff --git a/libs/@local/hashql/eval/tests/ui/postgres/input-parameter-exists.jsonc b/libs/@local/hashql/eval/tests/ui/postgres/input-parameter-exists.jsonc new file mode 100644 index 00000000000..730cb9f87f7 --- /dev/null +++ b/libs/@local/hashql/eval/tests/ui/postgres/input-parameter-exists.jsonc @@ -0,0 +1,11 @@ +//@ run: pass +//@ description: Input parameter exists check - InputOp::Exists produces NOT IS NULL +// biome-ignore format: readability +["::graph::tail::collect", + ["::graph::body::filter", + ["::graph::head::entities", ["input", "time_axis", "_"]], + ["fn", { "#tuple": [] }, { "#struct": { "vertex": "_" } }, "_", + ["input", "optional_flag", "Boolean", { "#literal": true }] + ] + ] +] diff --git a/libs/@local/hashql/eval/tests/ui/postgres/input-parameter-exists.stdout b/libs/@local/hashql/eval/tests/ui/postgres/input-parameter-exists.stdout new file mode 100644 index 00000000000..baaa1eb2e02 --- /dev/null +++ b/libs/@local/hashql/eval/tests/ui/postgres/input-parameter-exists.stdout @@ -0,0 +1,13 @@ +════ SQL ═══════════════════════════════════════════════════════════════════════ + +SELECT ("continuation_3_0"."row")."block" AS "continuation_3_0_block", ("continuation_3_0"."row")."locals" AS "continuation_3_0_locals", ("continuation_3_0"."row")."values" AS "continuation_3_0_values" +FROM "entity_temporal_metadata" AS "entity_temporal_metadata_0_0_0" +CROSS JOIN LATERAL (SELECT CASE WHEN (($2 IS NOT NULL)::int) = 0 THEN (ROW(1, NULL, NULL, NULL)::continuation) WHEN (($2 IS NOT NULL)::int) = 1 THEN (ROW($2, NULL, NULL, NULL)::continuation) END AS "row" +OFFSET 0) AS "continuation_3_0" +WHERE "entity_temporal_metadata_0_0_0"."transaction_time" && $0 AND "entity_temporal_metadata_0_0_0"."decision_time" && $1 AND ("continuation_3_0"."row")."filter" IS NOT FALSE + +════ Parameters ════════════════════════════════════════════════════════════════ + +$0: TemporalAxis(Transaction) +$1: TemporalAxis(Decision) +$2: Input(optional_flag) diff --git 
a/libs/@local/hashql/eval/tests/ui/postgres/input-parameter-load.aux.mir b/libs/@local/hashql/eval/tests/ui/postgres/input-parameter-load.aux.mir new file mode 100644 index 00000000000..5bd46f8214a --- /dev/null +++ b/libs/@local/hashql/eval/tests/ui/postgres/input-parameter-load.aux.mir @@ -0,0 +1,50 @@ +thunk {thunk#1}() -> ::graph::TimeAxis { + let %0: ::graph::TimeAxis + + bb0(): { + %0 = input LOAD time_axis + + return %0 + } +} + +thunk {thunk#2}() -> ::graph::types::knowledge::entity::EntityUuid { + let %0: ::graph::types::knowledge::entity::EntityUuid + + bb0(): { + %0 = input LOAD user_id + + return %0 + } +} + +fn {graph::read::filter@7}(%0: (), %1: ::graph::types::knowledge::entity::Entity) -> Boolean { + let %2: Boolean + let %3: ::graph::types::knowledge::entity::EntityUuid + + bb0(): { // postgres + %3 = input LOAD user_id + %2 = %1.metadata.record_id.entity_id.entity_uuid == %3 + + return %2 + } +} + +thunk {thunk#4}() -> List<::graph::types::knowledge::entity::Entity> { + let %0: List<::graph::types::knowledge::entity::Entity> + let %1: () + let %2: ::graph::TimeAxis + + bb0(): { + %2 = input LOAD time_axis + %1 = () + + graph read entities(%2) + |> filter({graph::read::filter@7}, %1) + |> collect -> bb1(_) + } + + bb1(%0): { + return %0 + } +} \ No newline at end of file diff --git a/libs/@local/hashql/eval/tests/ui/postgres/input-parameter-load.jsonc b/libs/@local/hashql/eval/tests/ui/postgres/input-parameter-load.jsonc new file mode 100644 index 00000000000..3579696c0fb --- /dev/null +++ b/libs/@local/hashql/eval/tests/ui/postgres/input-parameter-load.jsonc @@ -0,0 +1,14 @@ +//@ run: pass +//@ description: Input parameter as comparison operand - tests RValue::Input compilation and parameter dedup +// biome-ignore format: readability +["::graph::tail::collect", + ["::graph::body::filter", + ["::graph::head::entities", ["input", "time_axis", "_"]], + ["fn", { "#tuple": [] }, { "#struct": { "vertex": "_" } }, "_", + ["==", + 
"vertex.metadata.record_id.entity_id.entity_uuid", + ["input", "user_id", "::graph::types::knowledge::entity::EntityUuid"] + ] + ] + ] +] diff --git a/libs/@local/hashql/eval/tests/ui/postgres/input-parameter-load.stdout b/libs/@local/hashql/eval/tests/ui/postgres/input-parameter-load.stdout new file mode 100644 index 00000000000..f2c08adb13e --- /dev/null +++ b/libs/@local/hashql/eval/tests/ui/postgres/input-parameter-load.stdout @@ -0,0 +1,13 @@ +════ SQL ═══════════════════════════════════════════════════════════════════════ + +SELECT ("continuation_2_0"."row")."block" AS "continuation_2_0_block", ("continuation_2_0"."row")."locals" AS "continuation_2_0_locals", ("continuation_2_0"."row")."values" AS "continuation_2_0_values" +FROM "entity_temporal_metadata" AS "entity_temporal_metadata_0_0_0" +CROSS JOIN LATERAL (SELECT (ROW("entity_temporal_metadata_0_0_0"."entity_uuid" = $2, NULL, NULL, NULL)::continuation) AS "row" +OFFSET 0) AS "continuation_2_0" +WHERE "entity_temporal_metadata_0_0_0"."transaction_time" && $0 AND "entity_temporal_metadata_0_0_0"."decision_time" && $1 AND ("continuation_2_0"."row")."filter" IS NOT FALSE + +════ Parameters ════════════════════════════════════════════════════════════════ + +$0: TemporalAxis(Transaction) +$1: TemporalAxis(Decision) +$2: Input(user_id) diff --git a/libs/@local/hashql/eval/tests/ui/postgres/left-entity-filter.jsonc b/libs/@local/hashql/eval/tests/ui/postgres/left-entity-filter.jsonc new file mode 100644 index 00000000000..141b5d66c76 --- /dev/null +++ b/libs/@local/hashql/eval/tests/ui/postgres/left-entity-filter.jsonc @@ -0,0 +1,14 @@ +//@ run: skip reason="link_data is Option; accessing fields through Option requires unwrap/pattern-match not yet expressible in filter J-Expr" +//@ description: Left entity UUID filter - LEFT OUTER JOIN on entity_has_left_entity +// biome-ignore format: readability +["::graph::tail::collect", + ["::graph::body::filter", + ["::graph::head::entities", ["input", "time_axis", "_"]], 
+ ["fn", { "#tuple": [] }, { "#struct": { "vertex": "_" } }, "_", + ["==", + "vertex.link_data.left_entity_id.entity_uuid", + ["input", "id", "::graph::types::knowledge::entity::EntityUuid"] + ] + ] + ] +] diff --git a/libs/@local/hashql/eval/tests/ui/postgres/let-binding-propagation.aux.mir b/libs/@local/hashql/eval/tests/ui/postgres/let-binding-propagation.aux.mir new file mode 100644 index 00000000000..99c0ab174d5 --- /dev/null +++ b/libs/@local/hashql/eval/tests/ui/postgres/let-binding-propagation.aux.mir @@ -0,0 +1,50 @@ +thunk {thunk#2}() -> ::graph::TimeAxis { + let %0: ::graph::TimeAxis + + bb0(): { + %0 = input LOAD time_axis + + return %0 + } +} + +thunk x:0() -> ::graph::types::knowledge::entity::EntityUuid { + let %0: ::graph::types::knowledge::entity::EntityUuid + + bb0(): { + %0 = input LOAD user_id + + return %0 + } +} + +fn {graph::read::filter@7}(%0: (), %1: ::graph::types::knowledge::entity::Entity) -> Boolean { + let %2: Boolean + let %3: ::graph::types::knowledge::entity::EntityUuid + + bb0(): { // postgres + %3 = input LOAD user_id + %2 = %1.metadata.record_id.entity_id.entity_uuid == %3 + + return %2 + } +} + +thunk {thunk#4}() -> List<::graph::types::knowledge::entity::Entity> { + let %0: List<::graph::types::knowledge::entity::Entity> + let %1: () + let %2: ::graph::TimeAxis + + bb0(): { + %2 = input LOAD time_axis + %1 = () + + graph read entities(%2) + |> filter({graph::read::filter@7}, %1) + |> collect -> bb1(_) + } + + bb1(%0): { + return %0 + } +} \ No newline at end of file diff --git a/libs/@local/hashql/eval/tests/ui/postgres/let-binding-propagation.jsonc b/libs/@local/hashql/eval/tests/ui/postgres/let-binding-propagation.jsonc new file mode 100644 index 00000000000..b8bafa9556b --- /dev/null +++ b/libs/@local/hashql/eval/tests/ui/postgres/let-binding-propagation.jsonc @@ -0,0 +1,16 @@ +//@ run: pass +//@ description: Let-binding with input parameter - locals map tracks intermediate values +// biome-ignore format: readability 
+["::graph::tail::collect", + ["::graph::body::filter", + ["::graph::head::entities", ["input", "time_axis", "_"]], + ["fn", { "#tuple": [] }, { "#struct": { "vertex": "_" } }, "_", + ["let", "x", ["input", "user_id", "::graph::types::knowledge::entity::EntityUuid"], + ["==", + "vertex.metadata.record_id.entity_id.entity_uuid", + "x" + ] + ] + ] + ] +] diff --git a/libs/@local/hashql/eval/tests/ui/postgres/let-binding-propagation.stdout b/libs/@local/hashql/eval/tests/ui/postgres/let-binding-propagation.stdout new file mode 100644 index 00000000000..f2c08adb13e --- /dev/null +++ b/libs/@local/hashql/eval/tests/ui/postgres/let-binding-propagation.stdout @@ -0,0 +1,13 @@ +════ SQL ═══════════════════════════════════════════════════════════════════════ + +SELECT ("continuation_2_0"."row")."block" AS "continuation_2_0_block", ("continuation_2_0"."row")."locals" AS "continuation_2_0_locals", ("continuation_2_0"."row")."values" AS "continuation_2_0_values" +FROM "entity_temporal_metadata" AS "entity_temporal_metadata_0_0_0" +CROSS JOIN LATERAL (SELECT (ROW("entity_temporal_metadata_0_0_0"."entity_uuid" = $2, NULL, NULL, NULL)::continuation) AS "row" +OFFSET 0) AS "continuation_2_0" +WHERE "entity_temporal_metadata_0_0_0"."transaction_time" && $0 AND "entity_temporal_metadata_0_0_0"."decision_time" && $1 AND ("continuation_2_0"."row")."filter" IS NOT FALSE + +════ Parameters ════════════════════════════════════════════════════════════════ + +$0: TemporalAxis(Transaction) +$1: TemporalAxis(Decision) +$2: Input(user_id) diff --git a/libs/@local/hashql/eval/tests/ui/postgres/list-construction.aux.mir b/libs/@local/hashql/eval/tests/ui/postgres/list-construction.aux.mir new file mode 100644 index 00000000000..304c2591a3b --- /dev/null +++ b/libs/@local/hashql/eval/tests/ui/postgres/list-construction.aux.mir @@ -0,0 +1,66 @@ +thunk {thunk#1}() -> ::graph::TimeAxis { + let %0: ::graph::TimeAxis + + bb0(): { + %0 = input LOAD time_axis + + return %0 + } +} + +thunk {thunk#2}() 
-> ::graph::types::knowledge::entity::EntityUuid { + let %0: ::graph::types::knowledge::entity::EntityUuid + + bb0(): { + %0 = input LOAD u + + return %0 + } +} + +thunk {thunk#3}() -> ::graph::types::knowledge::entity::EntityUuid { + let %0: ::graph::types::knowledge::entity::EntityUuid + + bb0(): { + %0 = input LOAD v + + return %0 + } +} + +fn {graph::read::filter@7}(%0: (), %1: ::graph::types::knowledge::entity::Entity) -> Boolean { + let %2: List<::graph::types::knowledge::entity::EntityUuid> + let %3: List<::graph::types::knowledge::entity::EntityUuid> + let %4: Boolean + let %5: ::graph::types::knowledge::entity::EntityUuid + let %6: ::graph::types::knowledge::entity::EntityUuid + + bb0(): { // postgres + %6 = input LOAD u + %2 = list(%1.metadata.record_id.entity_id.entity_uuid, %6) + %5 = input LOAD v + %3 = list(%5, %1.metadata.record_id.entity_id.entity_uuid) + %4 = %2 == %3 + + return %4 + } +} + +thunk {thunk#7}() -> List<::graph::types::knowledge::entity::Entity> { + let %0: List<::graph::types::knowledge::entity::Entity> + let %1: () + let %2: ::graph::TimeAxis + + bb0(): { + %2 = input LOAD time_axis + %1 = () + + graph read entities(%2) + |> filter({graph::read::filter@7}, %1) + |> collect -> bb1(_) + } + + bb1(%0): { + return %0 + } +} \ No newline at end of file diff --git a/libs/@local/hashql/eval/tests/ui/postgres/list-construction.jsonc b/libs/@local/hashql/eval/tests/ui/postgres/list-construction.jsonc new file mode 100644 index 00000000000..b8087061f0d --- /dev/null +++ b/libs/@local/hashql/eval/tests/ui/postgres/list-construction.jsonc @@ -0,0 +1,20 @@ +//@ run: pass +//@ description: List aggregate from entity fields - AggregateKind::List → jsonb_build_array +// biome-ignore format: readability +["::graph::tail::collect", + ["::graph::body::filter", + ["::graph::head::entities", ["input", "time_axis", "_"]], + ["fn", { "#tuple": [] }, { "#struct": { "vertex": "_" } }, "_", + ["==", + { "#list": [ + 
"vertex.metadata.record_id.entity_id.entity_uuid", + ["input", "u", "::graph::types::knowledge::entity::EntityUuid"] + ]}, + { "#list": [ + ["input", "v", "::graph::types::knowledge::entity::EntityUuid"], + "vertex.metadata.record_id.entity_id.entity_uuid" + ]} + ] + ] + ] +] diff --git a/libs/@local/hashql/eval/tests/ui/postgres/list-construction.stdout b/libs/@local/hashql/eval/tests/ui/postgres/list-construction.stdout new file mode 100644 index 00000000000..6da76e10685 --- /dev/null +++ b/libs/@local/hashql/eval/tests/ui/postgres/list-construction.stdout @@ -0,0 +1,14 @@ +════ SQL ═══════════════════════════════════════════════════════════════════════ + +SELECT ("continuation_3_0"."row")."block" AS "continuation_3_0_block", ("continuation_3_0"."row")."locals" AS "continuation_3_0_locals", ("continuation_3_0"."row")."values" AS "continuation_3_0_values" +FROM "entity_temporal_metadata" AS "entity_temporal_metadata_0_0_0" +CROSS JOIN LATERAL (SELECT (ROW(jsonb_build_array("entity_temporal_metadata_0_0_0"."entity_uuid", $2) = jsonb_build_array($3, "entity_temporal_metadata_0_0_0"."entity_uuid"), NULL, NULL, NULL)::continuation) AS "row" +OFFSET 0) AS "continuation_3_0" +WHERE "entity_temporal_metadata_0_0_0"."transaction_time" && $0 AND "entity_temporal_metadata_0_0_0"."decision_time" && $1 AND ("continuation_3_0"."row")."filter" IS NOT FALSE + +════ Parameters ════════════════════════════════════════════════════════════════ + +$0: TemporalAxis(Transaction) +$1: TemporalAxis(Decision) +$2: Input(u) +$3: Input(v) diff --git a/libs/@local/hashql/eval/tests/ui/postgres/logical-and-inputs.aux.mir b/libs/@local/hashql/eval/tests/ui/postgres/logical-and-inputs.aux.mir new file mode 100644 index 00000000000..ef01d32c99d --- /dev/null +++ b/libs/@local/hashql/eval/tests/ui/postgres/logical-and-inputs.aux.mir @@ -0,0 +1,80 @@ +thunk {thunk#1}() -> ::graph::TimeAxis { + let %0: ::graph::TimeAxis + + bb0(): { + %0 = input LOAD time_axis + + return %0 + } +} + +thunk 
{thunk#2}() -> Boolean { + let %0: Boolean + + bb0(): { + %0 = input LOAD a + + return %0 + } +} + +thunk {thunk#4}() -> Boolean { + let %0: Boolean + let %1: Boolean + + bb0(): { + %1 = input LOAD a + + switchInt(%1) -> [0: bb2(), 1: bb1()] + } + + bb1(): { + %0 = input LOAD b + + return %0 + } + + bb2(): { + return 0 + } +} + +fn {graph::read::filter@7}(%0: (), %1: ::graph::types::knowledge::entity::Entity) -> Boolean { + let %2: Boolean + let %3: Boolean + + bb0(): { // postgres + %3 = input LOAD a + + switchInt(%3) -> [0: bb2(), 1: bb1()] + } + + bb1(): { // postgres + %2 = input LOAD b + + return %2 + } + + bb2(): { // postgres + return 0 + } +} + +thunk {thunk#5}() -> List<::graph::types::knowledge::entity::Entity> { + let %0: List<::graph::types::knowledge::entity::Entity> + let %1: () + let %2: ::graph::TimeAxis + + bb0(): { + %2 = input LOAD time_axis + %1 = () + + graph read entities(%2) + |> filter({graph::read::filter@7}, %1) + |> collect -> bb1(_) + } + + bb1(%0): { + return %0 + } +} \ No newline at end of file diff --git a/libs/@local/hashql/eval/tests/ui/postgres/logical-and-inputs.jsonc b/libs/@local/hashql/eval/tests/ui/postgres/logical-and-inputs.jsonc new file mode 100644 index 00000000000..9c252b62e5c --- /dev/null +++ b/libs/@local/hashql/eval/tests/ui/postgres/logical-and-inputs.jsonc @@ -0,0 +1,14 @@ +//@ run: pass +//@ description: Short-circuit AND on inputs - && desugars to if/else, survives as CASE WHEN +// biome-ignore format: readability +["::graph::tail::collect", + ["::graph::body::filter", + ["::graph::head::entities", ["input", "time_axis", "_"]], + ["fn", { "#tuple": [] }, { "#struct": { "vertex": "_" } }, "_", + ["&&", + ["input", "a", "Boolean"], + ["input", "b", "Boolean"] + ] + ] + ] +] diff --git a/libs/@local/hashql/eval/tests/ui/postgres/logical-and-inputs.stdout b/libs/@local/hashql/eval/tests/ui/postgres/logical-and-inputs.stdout new file mode 100644 index 00000000000..d3ae0a3d6a5 --- /dev/null +++ 
b/libs/@local/hashql/eval/tests/ui/postgres/logical-and-inputs.stdout @@ -0,0 +1,14 @@ +════ SQL ═══════════════════════════════════════════════════════════════════════ + +SELECT ("continuation_3_0"."row")."block" AS "continuation_3_0_block", ("continuation_3_0"."row")."locals" AS "continuation_3_0_locals", ("continuation_3_0"."row")."values" AS "continuation_3_0_values" +FROM "entity_temporal_metadata" AS "entity_temporal_metadata_0_0_0" +CROSS JOIN LATERAL (SELECT CASE WHEN (($2)::int) = 0 THEN (ROW(0, NULL, NULL, NULL)::continuation) WHEN (($2)::int) = 1 THEN (ROW($3, NULL, NULL, NULL)::continuation) END AS "row" +OFFSET 0) AS "continuation_3_0" +WHERE "entity_temporal_metadata_0_0_0"."transaction_time" && $0 AND "entity_temporal_metadata_0_0_0"."decision_time" && $1 AND ("continuation_3_0"."row")."filter" IS NOT FALSE + +════ Parameters ════════════════════════════════════════════════════════════════ + +$0: TemporalAxis(Transaction) +$1: TemporalAxis(Decision) +$2: Input(a) +$3: Input(b) diff --git a/libs/@local/hashql/eval/tests/ui/postgres/logical-not-input.jsonc b/libs/@local/hashql/eval/tests/ui/postgres/logical-not-input.jsonc new file mode 100644 index 00000000000..37fcee54ac4 --- /dev/null +++ b/libs/@local/hashql/eval/tests/ui/postgres/logical-not-input.jsonc @@ -0,0 +1,11 @@ +//@ run: skip reason="::core::bool::not intrinsic not supported in specialization yet (H-4729)" +//@ description: Unary NOT on input parameter - UnOp::Not produces NOT in SQL +// biome-ignore format: readability +["::graph::tail::collect", + ["::graph::body::filter", + ["::graph::head::entities", ["input", "time_axis", "_"]], + ["fn", { "#tuple": [] }, { "#struct": { "vertex": "_" } }, "_", + ["!", ["input", "flag", "Boolean"]] + ] + ] +] diff --git a/libs/@local/hashql/eval/tests/ui/postgres/minimal-select-no-extra-joins.aux.mir b/libs/@local/hashql/eval/tests/ui/postgres/minimal-select-no-extra-joins.aux.mir new file mode 100644 index 00000000000..1b70eb198c4 --- /dev/null +++ 
b/libs/@local/hashql/eval/tests/ui/postgres/minimal-select-no-extra-joins.aux.mir @@ -0,0 +1,50 @@ +thunk {thunk#1}() -> ::graph::TimeAxis { + let %0: ::graph::TimeAxis + + bb0(): { + %0 = input LOAD time_axis + + return %0 + } +} + +thunk {thunk#2}() -> ::graph::types::principal::actor_group::web::WebId { + let %0: ::graph::types::principal::actor_group::web::WebId + + bb0(): { + %0 = input LOAD expected_web + + return %0 + } +} + +fn {graph::read::filter@7}(%0: (), %1: ::graph::types::knowledge::entity::Entity) -> Boolean { + let %2: Boolean + let %3: ::graph::types::principal::actor_group::web::WebId + + bb0(): { // postgres + %3 = input LOAD expected_web + %2 = %1.metadata.record_id.entity_id.web_id == %3 + + return %2 + } +} + +thunk {thunk#4}() -> List<::graph::types::knowledge::entity::Entity> { + let %0: List<::graph::types::knowledge::entity::Entity> + let %1: () + let %2: ::graph::TimeAxis + + bb0(): { + %2 = input LOAD time_axis + %1 = () + + graph read entities(%2) + |> filter({graph::read::filter@7}, %1) + |> collect -> bb1(_) + } + + bb1(%0): { + return %0 + } +} \ No newline at end of file diff --git a/libs/@local/hashql/eval/tests/ui/postgres/minimal-select-no-extra-joins.jsonc b/libs/@local/hashql/eval/tests/ui/postgres/minimal-select-no-extra-joins.jsonc new file mode 100644 index 00000000000..37d4744e35f --- /dev/null +++ b/libs/@local/hashql/eval/tests/ui/postgres/minimal-select-no-extra-joins.jsonc @@ -0,0 +1,14 @@ +//@ run: pass +//@ description: Only base table fields accessed - no unnecessary joins in FROM clause +// biome-ignore format: readability +["::graph::tail::collect", + ["::graph::body::filter", + ["::graph::head::entities", ["input", "time_axis", "_"]], + ["fn", { "#tuple": [] }, { "#struct": { "vertex": "_" } }, "_", + ["==", + "vertex.metadata.record_id.entity_id.web_id", + ["input", "expected_web", "::graph::types::principal::actor_group::web::WebId"] + ] + ] + ] +] diff --git 
a/libs/@local/hashql/eval/tests/ui/postgres/minimal-select-no-extra-joins.stdout b/libs/@local/hashql/eval/tests/ui/postgres/minimal-select-no-extra-joins.stdout new file mode 100644 index 00000000000..590f4337436 --- /dev/null +++ b/libs/@local/hashql/eval/tests/ui/postgres/minimal-select-no-extra-joins.stdout @@ -0,0 +1,13 @@ +════ SQL ═══════════════════════════════════════════════════════════════════════ + +SELECT ("continuation_2_0"."row")."block" AS "continuation_2_0_block", ("continuation_2_0"."row")."locals" AS "continuation_2_0_locals", ("continuation_2_0"."row")."values" AS "continuation_2_0_values" +FROM "entity_temporal_metadata" AS "entity_temporal_metadata_0_0_0" +CROSS JOIN LATERAL (SELECT (ROW("entity_temporal_metadata_0_0_0"."web_id" = $2, NULL, NULL, NULL)::continuation) AS "row" +OFFSET 0) AS "continuation_2_0" +WHERE "entity_temporal_metadata_0_0_0"."transaction_time" && $0 AND "entity_temporal_metadata_0_0_0"."decision_time" && $1 AND ("continuation_2_0"."row")."filter" IS NOT FALSE + +════ Parameters ════════════════════════════════════════════════════════════════ + +$0: TemporalAxis(Transaction) +$1: TemporalAxis(Decision) +$2: Input(expected_web) diff --git a/libs/@local/hashql/eval/tests/ui/postgres/mixed-sources-filter.aux.mir b/libs/@local/hashql/eval/tests/ui/postgres/mixed-sources-filter.aux.mir new file mode 100644 index 00000000000..7e038965ce3 --- /dev/null +++ b/libs/@local/hashql/eval/tests/ui/postgres/mixed-sources-filter.aux.mir @@ -0,0 +1,49 @@ +fn {graph::read::filter@13}(%0: (), %1: ::graph::types::knowledge::entity::Entity) -> Boolean { + let %2: Boolean + + bb0(): { // postgres + %2 = !%1.metadata.archived + + return %2 + } +} + +fn {graph::read::filter@20}(%0: (::graph::types::knowledge::entity::EntityUuid,), %1: ::graph::types::knowledge::entity::Entity) -> Boolean { + let %2: Boolean + + bb0(): { // postgres + %2 = %1.metadata.record_id.entity_id.entity_uuid == %0.0 + + return %2 + } +} + +fn {graph::read::filter@36}(%0: 
(), %1: ::graph::types::knowledge::entity::Entity) -> Boolean { + bb0(): { // interpreter + return 1 + } +} + +thunk {thunk#10}() -> List<::graph::types::knowledge::entity::Entity> { + let %0: ::graph::types::knowledge::entity::EntityUuid + let %1: ::graph::TimeAxis + let %2: List<::graph::types::knowledge::entity::Entity> + let %3: () + let %4: (::graph::types::knowledge::entity::EntityUuid,) + + bb0(): { + %0 = input LOAD env_val + %1 = input LOAD time_axis + %3 = () + %4 = (%0) + + graph read entities(%1) + |> filter({graph::read::filter@13}, %3) + |> filter({graph::read::filter@20}, %4) + |> collect -> bb1(_) + } + + bb1(%2): { + return %2 + } +} \ No newline at end of file diff --git a/libs/@local/hashql/eval/tests/ui/postgres/mixed-sources-filter.jsonc b/libs/@local/hashql/eval/tests/ui/postgres/mixed-sources-filter.jsonc new file mode 100644 index 00000000000..b773e1a7f0e --- /dev/null +++ b/libs/@local/hashql/eval/tests/ui/postgres/mixed-sources-filter.jsonc @@ -0,0 +1,35 @@ +//@ run: pass +//@ description: Multiple parameter categories in one filter - temporal, env, input, primitive, entity paths +// biome-ignore format: readability +["if", { "#literal": true }, + ["let", "env_uuid", + ["input", "env_val", "::graph::types::knowledge::entity::EntityUuid"], + ["::graph::tail::collect", + ["::graph::body::filter", + ["::graph::body::filter", + ["::graph::head::entities", ["input", "time_axis", "_"]], + ["fn", { "#tuple": [] }, { "#struct": { "vertex": "_" } }, "_", + ["==", + "vertex.metadata.archived", + { "#literal": false } + ] + ] + ], + ["fn", { "#tuple": [] }, { "#struct": { "vertex": "_" } }, "_", + ["==", + "vertex.metadata.record_id.entity_id.entity_uuid", + "env_uuid" + ] + ] + ] + ] + ], + ["::graph::tail::collect", + ["::graph::body::filter", + ["::graph::head::entities", ["input", "time_axis", "_"]], + ["fn", { "#tuple": [] }, { "#struct": { "vertex": "_" } }, "_", + { "#literal": true } + ] + ] + ] +] diff --git 
a/libs/@local/hashql/eval/tests/ui/postgres/mixed-sources-filter.stdout b/libs/@local/hashql/eval/tests/ui/postgres/mixed-sources-filter.stdout new file mode 100644 index 00000000000..48f498a6cb3 --- /dev/null +++ b/libs/@local/hashql/eval/tests/ui/postgres/mixed-sources-filter.stdout @@ -0,0 +1,17 @@ +════ SQL ═══════════════════════════════════════════════════════════════════════ + +SELECT ("continuation_0_0"."row")."block" AS "continuation_0_0_block", ("continuation_0_0"."row")."locals" AS "continuation_0_0_locals", ("continuation_0_0"."row")."values" AS "continuation_0_0_values", ("continuation_1_0"."row")."block" AS "continuation_1_0_block", ("continuation_1_0"."row")."locals" AS "continuation_1_0_locals", ("continuation_1_0"."row")."values" AS "continuation_1_0_values" +FROM "entity_temporal_metadata" AS "entity_temporal_metadata_0_0_0" +INNER JOIN "entity_editions" AS "entity_editions_0_0_1" + ON "entity_editions_0_0_1"."entity_edition_id" = "entity_temporal_metadata_0_0_0"."entity_edition_id" +CROSS JOIN LATERAL (SELECT (ROW(NOT("entity_editions_0_0_1"."archived"), NULL, NULL, NULL)::continuation) AS "row" +OFFSET 0) AS "continuation_0_0" +CROSS JOIN LATERAL (SELECT (ROW("entity_temporal_metadata_0_0_0"."entity_uuid" = $2, NULL, NULL, NULL)::continuation) AS "row" +OFFSET 0) AS "continuation_1_0" +WHERE "entity_temporal_metadata_0_0_0"."transaction_time" && $0 AND "entity_temporal_metadata_0_0_0"."decision_time" && $1 AND ("continuation_0_0"."row")."filter" IS NOT FALSE AND ("continuation_1_0"."row")."filter" IS NOT FALSE + +════ Parameters ════════════════════════════════════════════════════════════════ + +$0: TemporalAxis(Transaction) +$1: TemporalAxis(Decision) +$2: Env(#0) diff --git a/libs/@local/hashql/eval/tests/ui/postgres/multiple-filters.aux.mir b/libs/@local/hashql/eval/tests/ui/postgres/multiple-filters.aux.mir new file mode 100644 index 00000000000..b35b02a348c --- /dev/null +++ 
b/libs/@local/hashql/eval/tests/ui/postgres/multiple-filters.aux.mir @@ -0,0 +1,75 @@ +thunk {thunk#2}() -> ::graph::TimeAxis { + let %0: ::graph::TimeAxis + + bb0(): { + %0 = input LOAD time_axis + + return %0 + } +} + +thunk {thunk#3}() -> ::graph::types::knowledge::entity::EntityUuid { + let %0: ::graph::types::knowledge::entity::EntityUuid + + bb0(): { + %0 = input LOAD uuid + + return %0 + } +} + +thunk {thunk#5}() -> ::graph::types::principal::actor_group::web::WebId { + let %0: ::graph::types::principal::actor_group::web::WebId + + bb0(): { + %0 = input LOAD web + + return %0 + } +} + +fn {graph::read::filter@9}(%0: (), %1: ::graph::types::knowledge::entity::Entity) -> Boolean { + let %2: Boolean + let %3: ::graph::types::knowledge::entity::EntityUuid + + bb0(): { // postgres + %3 = input LOAD uuid + %2 = %1.metadata.record_id.entity_id.entity_uuid == %3 + + return %2 + } +} + +fn {graph::read::filter@18}(%0: (), %1: ::graph::types::knowledge::entity::Entity) -> Boolean { + let %2: Boolean + let %3: ::graph::types::principal::actor_group::web::WebId + + bb0(): { // postgres + %3 = input LOAD web + %2 = %1.metadata.record_id.entity_id.web_id == %3 + + return %2 + } +} + +thunk {thunk#7}() -> List<::graph::types::knowledge::entity::Entity> { + let %0: List<::graph::types::knowledge::entity::Entity> + let %1: () + let %2: () + let %3: ::graph::TimeAxis + + bb0(): { + %3 = input LOAD time_axis + %1 = () + %2 = () + + graph read entities(%3) + |> filter({graph::read::filter@9}, %1) + |> filter({graph::read::filter@18}, %2) + |> collect -> bb1(_) + } + + bb1(%0): { + return %0 + } +} \ No newline at end of file diff --git a/libs/@local/hashql/eval/tests/ui/postgres/multiple-filters.jsonc b/libs/@local/hashql/eval/tests/ui/postgres/multiple-filters.jsonc new file mode 100644 index 00000000000..3c7585d6cdf --- /dev/null +++ b/libs/@local/hashql/eval/tests/ui/postgres/multiple-filters.jsonc @@ -0,0 +1,22 @@ +//@ run: pass +//@ description: Two separate filter bodies 
- two CROSS JOIN LATERAL subqueries, two continuation aliases +// biome-ignore format: readability +["::graph::tail::collect", + ["::graph::body::filter", + ["::graph::body::filter", + ["::graph::head::entities", ["input", "time_axis", "_"]], + ["fn", { "#tuple": [] }, { "#struct": { "vertex": "_" } }, "_", + ["==", + "vertex.metadata.record_id.entity_id.entity_uuid", + ["input", "uuid", "::graph::types::knowledge::entity::EntityUuid"] + ] + ] + ], + ["fn", { "#tuple": [] }, { "#struct": { "vertex": "_" } }, "_", + ["==", + "vertex.metadata.record_id.entity_id.web_id", + ["input", "web", "::graph::types::principal::actor_group::web::WebId"] + ] + ] + ] +] diff --git a/libs/@local/hashql/eval/tests/ui/postgres/multiple-filters.stdout b/libs/@local/hashql/eval/tests/ui/postgres/multiple-filters.stdout new file mode 100644 index 00000000000..f952da11bca --- /dev/null +++ b/libs/@local/hashql/eval/tests/ui/postgres/multiple-filters.stdout @@ -0,0 +1,16 @@ +════ SQL ═══════════════════════════════════════════════════════════════════════ + +SELECT ("continuation_3_0"."row")."block" AS "continuation_3_0_block", ("continuation_3_0"."row")."locals" AS "continuation_3_0_locals", ("continuation_3_0"."row")."values" AS "continuation_3_0_values", ("continuation_4_0"."row")."block" AS "continuation_4_0_block", ("continuation_4_0"."row")."locals" AS "continuation_4_0_locals", ("continuation_4_0"."row")."values" AS "continuation_4_0_values" +FROM "entity_temporal_metadata" AS "entity_temporal_metadata_0_0_0" +CROSS JOIN LATERAL (SELECT (ROW("entity_temporal_metadata_0_0_0"."entity_uuid" = $2, NULL, NULL, NULL)::continuation) AS "row" +OFFSET 0) AS "continuation_3_0" +CROSS JOIN LATERAL (SELECT (ROW("entity_temporal_metadata_0_0_0"."web_id" = $3, NULL, NULL, NULL)::continuation) AS "row" +OFFSET 0) AS "continuation_4_0" +WHERE "entity_temporal_metadata_0_0_0"."transaction_time" && $0 AND "entity_temporal_metadata_0_0_0"."decision_time" && $1 AND ("continuation_3_0"."row")."filter" 
IS NOT FALSE AND ("continuation_4_0"."row")."filter" IS NOT FALSE + +════ Parameters ════════════════════════════════════════════════════════════════ + +$0: TemporalAxis(Transaction) +$1: TemporalAxis(Decision) +$2: Input(uuid) +$3: Input(web) diff --git a/libs/@local/hashql/eval/tests/ui/postgres/nested-if-input-branches.aux.mir b/libs/@local/hashql/eval/tests/ui/postgres/nested-if-input-branches.aux.mir new file mode 100644 index 00000000000..e5f2fb7b56a --- /dev/null +++ b/libs/@local/hashql/eval/tests/ui/postgres/nested-if-input-branches.aux.mir @@ -0,0 +1,82 @@ +thunk {thunk#1}() -> ::graph::TimeAxis { + let %0: ::graph::TimeAxis + + bb0(): { + %0 = input LOAD time_axis + + return %0 + } +} + +thunk {thunk#10}() -> Boolean { + let %0: Boolean + + bb0(): { + %0 = input LOAD foo + + return %0 + } +} + +fn {graph::read::filter@7}(%0: (), %1: ::graph::types::knowledge::entity::Entity) -> Boolean { + let %2: Boolean + let %3: ::graph::types::knowledge::entity::EntityUuid + let %4: Boolean + let %5: ::graph::types::knowledge::entity::EntityUuid + let %6: Boolean + let %7: ::graph::types::knowledge::entity::EntityUuid + let %8: Boolean + let %9: Boolean + + bb0(): { // postgres + %9 = input LOAD foo + + switchInt(%9) -> [0: bb4(), 1: bb1()] + } + + bb1(): { // postgres + %2 = input LOAD bar + + switchInt(%2) -> [0: bb3(), 1: bb2()] + } + + bb2(): { // postgres + %3 = input LOAD id_a + %4 = %1.metadata.record_id.entity_id.entity_uuid == %3 + + return %4 + } + + bb3(): { // postgres + %5 = input LOAD id_b + %6 = %1.metadata.record_id.entity_id.entity_uuid == %5 + + return %6 + } + + bb4(): { // postgres + %7 = input LOAD id_c + %8 = %1.metadata.record_id.entity_id.entity_uuid == %7 + + return %8 + } +} + +thunk {thunk#12}() -> List<::graph::types::knowledge::entity::Entity> { + let %0: List<::graph::types::knowledge::entity::Entity> + let %1: () + let %2: ::graph::TimeAxis + + bb0(): { + %2 = input LOAD time_axis + %1 = () + + graph read entities(%2) + |> 
filter({graph::read::filter@7}, %1) + |> collect -> bb1(_) + } + + bb1(%0): { + return %0 + } +} \ No newline at end of file diff --git a/libs/@local/hashql/eval/tests/ui/postgres/nested-if-input-branches.jsonc b/libs/@local/hashql/eval/tests/ui/postgres/nested-if-input-branches.jsonc new file mode 100644 index 00000000000..8b987ef882f --- /dev/null +++ b/libs/@local/hashql/eval/tests/ui/postgres/nested-if-input-branches.jsonc @@ -0,0 +1,28 @@ +//@ run: pass +//@ description: Nested runtime branching - nested CASE WHEN from nested if with input discriminants +// biome-ignore format: readability +["::graph::tail::collect", + ["::graph::body::filter", + ["::graph::head::entities", ["input", "time_axis", "_"]], + ["fn", { "#tuple": [] }, { "#struct": { "vertex": "_" } }, "_", + ["if", + ["input", "foo", "Boolean"], + ["if", + ["input", "bar", "Boolean"], + ["==", + "vertex.metadata.record_id.entity_id.entity_uuid", + ["input", "id_a", "::graph::types::knowledge::entity::EntityUuid"] + ], + ["==", + "vertex.metadata.record_id.entity_id.entity_uuid", + ["input", "id_b", "::graph::types::knowledge::entity::EntityUuid"] + ] + ], + ["==", + "vertex.metadata.record_id.entity_id.entity_uuid", + ["input", "id_c", "::graph::types::knowledge::entity::EntityUuid"] + ] + ] + ] + ] +] diff --git a/libs/@local/hashql/eval/tests/ui/postgres/nested-if-input-branches.stdout b/libs/@local/hashql/eval/tests/ui/postgres/nested-if-input-branches.stdout new file mode 100644 index 00000000000..6e27450ca62 --- /dev/null +++ b/libs/@local/hashql/eval/tests/ui/postgres/nested-if-input-branches.stdout @@ -0,0 +1,17 @@ +════ SQL ═══════════════════════════════════════════════════════════════════════ + +SELECT ("continuation_2_0"."row")."block" AS "continuation_2_0_block", ("continuation_2_0"."row")."locals" AS "continuation_2_0_locals", ("continuation_2_0"."row")."values" AS "continuation_2_0_values" +FROM "entity_temporal_metadata" AS "entity_temporal_metadata_0_0_0" +CROSS JOIN LATERAL (SELECT 
CASE WHEN (($2)::int) = 0 THEN (ROW("entity_temporal_metadata_0_0_0"."entity_uuid" = $3, NULL, NULL, NULL)::continuation) WHEN (($2)::int) = 1 THEN CASE WHEN (($4)::int) = 0 THEN (ROW("entity_temporal_metadata_0_0_0"."entity_uuid" = $5, NULL, NULL, NULL)::continuation) WHEN (($4)::int) = 1 THEN (ROW("entity_temporal_metadata_0_0_0"."entity_uuid" = $6, NULL, NULL, NULL)::continuation) END END AS "row" +OFFSET 0) AS "continuation_2_0" +WHERE "entity_temporal_metadata_0_0_0"."transaction_time" && $0 AND "entity_temporal_metadata_0_0_0"."decision_time" && $1 AND ("continuation_2_0"."row")."filter" IS NOT FALSE + +════ Parameters ════════════════════════════════════════════════════════════════ + +$0: TemporalAxis(Transaction) +$1: TemporalAxis(Decision) +$2: Input(foo) +$3: Input(id_c) +$4: Input(bar) +$5: Input(id_b) +$6: Input(id_a) diff --git a/libs/@local/hashql/eval/tests/ui/postgres/nested-property-access.jsonc b/libs/@local/hashql/eval/tests/ui/postgres/nested-property-access.jsonc new file mode 100644 index 00000000000..502dafae240 --- /dev/null +++ b/libs/@local/hashql/eval/tests/ui/postgres/nested-property-access.jsonc @@ -0,0 +1,14 @@ +//@ run: skip reason="property subscript requires a concrete entity type; the type system cannot resolve unknown field access on vertex.properties yet" +//@ description: Multi-level property access - json_extract_path with multiple keys +// biome-ignore format: readability +["::graph::tail::collect", + ["::graph::body::filter", + ["::graph::head::entities", ["input", "time_axis", "_"]], + ["fn", { "#tuple": [] }, { "#struct": { "vertex": "_" } }, "_", + ["==", + [".", [".", "vertex.properties", "address"], "city"], + { "#literal": "London" } + ] + ] + ] +] diff --git a/libs/@local/hashql/eval/tests/ui/postgres/opaque-passthrough.aux.mir b/libs/@local/hashql/eval/tests/ui/postgres/opaque-passthrough.aux.mir new file mode 100644 index 00000000000..fda48bf5412 --- /dev/null +++ 
b/libs/@local/hashql/eval/tests/ui/postgres/opaque-passthrough.aux.mir @@ -0,0 +1,120 @@ +thunk {thunk#1}() -> ::graph::TimeAxis { + let %0: ::graph::TimeAxis + + bb0(): { + %0 = input LOAD time_axis + + return %0 + } +} + +thunk {thunk#2}() -> String { + let %0: String + + bb0(): { + %0 = input LOAD id + + return %0 + } +} + +fn {ctor#::core::uuid::Uuid}(%0: (), %1: String) -> ::core::uuid::Uuid { + let %2: ::core::uuid::Uuid + + bb0(): { + %2 = opaque(::core::uuid::Uuid, %1) + + return %2 + } +} + +thunk {thunk#3}() -> (String) -> ::core::uuid::Uuid { + let %0: (String) -> ::core::uuid::Uuid + + bb0(): { + %0 = closure(({ctor#::core::uuid::Uuid} as FnPtr), ()) + + return %0 + } +} + +thunk {thunk#4}() -> ::core::uuid::Uuid { + let %0: ::core::uuid::Uuid + let %1: String + + bb0(): { + %1 = input LOAD id + %0 = opaque(::core::uuid::Uuid, %1) + + return %0 + } +} + +fn {ctor#::graph::types::knowledge::entity::EntityUuid}(%0: (), %1: ::core::uuid::Uuid) -> ::graph::types::knowledge::entity::EntityUuid { + let %2: ::graph::types::knowledge::entity::EntityUuid + + bb0(): { + %2 = opaque(::graph::types::knowledge::entity::EntityUuid, %1) + + return %2 + } +} + +thunk {thunk#5}() -> (::core::uuid::Uuid) -> ::graph::types::knowledge::entity::EntityUuid { + let %0: (::core::uuid::Uuid) -> ::graph::types::knowledge::entity::EntityUuid + + bb0(): { + %0 = closure(({ctor#::graph::types::knowledge::entity::EntityUuid} as FnPtr), ()) + + return %0 + } +} + +thunk {thunk#6}() -> ::graph::types::knowledge::entity::EntityUuid { + let %0: ::graph::types::knowledge::entity::EntityUuid + let %1: ::core::uuid::Uuid + let %2: String + + bb0(): { + %2 = input LOAD id + %1 = opaque(::core::uuid::Uuid, %2) + %0 = opaque(::graph::types::knowledge::entity::EntityUuid, %1) + + return %0 + } +} + +fn {graph::read::filter@7}(%0: (), %1: ::graph::types::knowledge::entity::Entity) -> Boolean { + let %2: Boolean + let %3: ::graph::types::knowledge::entity::EntityUuid + let %4: ::core::uuid::Uuid 
+ let %5: String + + bb0(): { // postgres + %5 = input LOAD id + %4 = opaque(::core::uuid::Uuid, %5) + %3 = opaque(::graph::types::knowledge::entity::EntityUuid, %4) + %2 = %1.metadata.record_id.entity_id.entity_uuid == %3 + + return %2 + } +} + +thunk {thunk#8}() -> List<::graph::types::knowledge::entity::Entity> { + let %0: List<::graph::types::knowledge::entity::Entity> + let %1: () + let %2: ::graph::TimeAxis + + bb0(): { + %2 = input LOAD time_axis + %1 = () + + graph read entities(%2) + |> filter({graph::read::filter@7}, %1) + |> collect -> bb1(_) + } + + bb1(%0): { + return %0 + } +} \ No newline at end of file diff --git a/libs/@local/hashql/eval/tests/ui/postgres/opaque-passthrough.jsonc b/libs/@local/hashql/eval/tests/ui/postgres/opaque-passthrough.jsonc new file mode 100644 index 00000000000..2938737ea24 --- /dev/null +++ b/libs/@local/hashql/eval/tests/ui/postgres/opaque-passthrough.jsonc @@ -0,0 +1,16 @@ +//@ run: pass +//@ description: Opaque type wrapper - AggregateKind::Opaque passes through inner operand unchanged +// biome-ignore format: readability +["::graph::tail::collect", + ["::graph::body::filter", + ["::graph::head::entities", ["input", "time_axis", "_"]], + ["fn", { "#tuple": [] }, { "#struct": { "vertex": "_" } }, "_", + ["==", + "vertex.metadata.record_id.entity_id.entity_uuid", + ["::graph::types::knowledge::entity::EntityUuid", + ["::core::uuid::Uuid", ["input", "id", "_"]] + ] + ] + ] + ] +] diff --git a/libs/@local/hashql/eval/tests/ui/postgres/opaque-passthrough.stdout b/libs/@local/hashql/eval/tests/ui/postgres/opaque-passthrough.stdout new file mode 100644 index 00000000000..aaeb8a8d408 --- /dev/null +++ b/libs/@local/hashql/eval/tests/ui/postgres/opaque-passthrough.stdout @@ -0,0 +1,13 @@ +════ SQL ═══════════════════════════════════════════════════════════════════════ + +SELECT ("continuation_8_0"."row")."block" AS "continuation_8_0_block", ("continuation_8_0"."row")."locals" AS "continuation_8_0_locals", 
("continuation_8_0"."row")."values" AS "continuation_8_0_values" +FROM "entity_temporal_metadata" AS "entity_temporal_metadata_0_0_0" +CROSS JOIN LATERAL (SELECT (ROW("entity_temporal_metadata_0_0_0"."entity_uuid" = $2, NULL, NULL, NULL)::continuation) AS "row" +OFFSET 0) AS "continuation_8_0" +WHERE "entity_temporal_metadata_0_0_0"."transaction_time" && $0 AND "entity_temporal_metadata_0_0_0"."decision_time" && $1 AND ("continuation_8_0"."row")."filter" IS NOT FALSE + +════ Parameters ════════════════════════════════════════════════════════════════ + +$0: TemporalAxis(Transaction) +$1: TemporalAxis(Decision) +$2: Input(id) diff --git a/libs/@local/hashql/eval/tests/ui/postgres/properties-triggers-editions-join.stderr b/libs/@local/hashql/eval/tests/ui/postgres/properties-triggers-editions-join.stderr new file mode 100644 index 00000000000..c1062f4802f --- /dev/null +++ b/libs/@local/hashql/eval/tests/ui/postgres/properties-triggers-editions-join.stderr @@ -0,0 +1,10 @@ +error[lower::type-check::unconstrained-type-variable]: Unconstrained type variable + ╭▸ +10 │ ["input", "expected_props", "_"] + │ ━ Cannot infer type for this variable - no usage constraints available + │ + ├ help: Add an explicit type annotation to provide the necessary context. For example: + │ - Change `let x = ...` to `let x: Type = ...` + │ - Provide type parameters like `function(...)` + │ - Use the value in a way that constrains its type + ╰ note: Type inference needs constraints that come from how variables are used. When a variable lacks both usage context and explicit annotations, the type system cannot determine an appropriate type. This commonly occurs with empty collections, unused variables, or generic functions without type annotations. 
\ No newline at end of file diff --git a/libs/@local/hashql/eval/tests/ui/postgres/property-field-equality.jsonc b/libs/@local/hashql/eval/tests/ui/postgres/property-field-equality.jsonc new file mode 100644 index 00000000000..a4d6c1caec8 --- /dev/null +++ b/libs/@local/hashql/eval/tests/ui/postgres/property-field-equality.jsonc @@ -0,0 +1,14 @@ +//@ run: skip reason="property subscript requires a concrete entity type; the type system cannot resolve unknown field access on vertex.properties yet" +//@ description: Property field equality - json_extract_path on entity_editions.properties +// biome-ignore format: readability +["::graph::tail::collect", + ["::graph::body::filter", + ["::graph::head::entities", ["input", "time_axis", "_"]], + ["fn", { "#tuple": [] }, { "#struct": { "vertex": "_" } }, "_", + ["==", + [".", "vertex.properties", "name"], + { "#literal": "Alice" } + ] + ] + ] +] diff --git a/libs/@local/hashql/eval/tests/ui/postgres/struct-construction.aux.mir b/libs/@local/hashql/eval/tests/ui/postgres/struct-construction.aux.mir new file mode 100644 index 00000000000..f63dd51fca6 --- /dev/null +++ b/libs/@local/hashql/eval/tests/ui/postgres/struct-construction.aux.mir @@ -0,0 +1,80 @@ +thunk {thunk#1}() -> ::graph::TimeAxis { + let %0: ::graph::TimeAxis + + bb0(): { + %0 = input LOAD time_axis + + return %0 + } +} + +thunk {thunk#2}() -> ::graph::types::knowledge::entity::EntityUuid { + let %0: ::graph::types::knowledge::entity::EntityUuid + + bb0(): { + %0 = input LOAD u + + return %0 + } +} + +thunk {thunk#3}() -> ::graph::types::principal::actor_group::web::WebId { + let %0: ::graph::types::principal::actor_group::web::WebId + + bb0(): { + %0 = input LOAD w + + return %0 + } +} + +thunk {thunk#5}() -> (uuid: ::graph::types::knowledge::entity::EntityUuid, web: ::graph::types::principal::actor_group::web::WebId) { + let %0: (uuid: ::graph::types::knowledge::entity::EntityUuid, web: ::graph::types::principal::actor_group::web::WebId) + let %1: 
::graph::types::principal::actor_group::web::WebId + let %2: ::graph::types::knowledge::entity::EntityUuid + + bb0(): { + %2 = input LOAD u + %1 = input LOAD w + %0 = (uuid: %2, web: %1) + + return %0 + } +} + +fn {graph::read::filter@7}(%0: (), %1: ::graph::types::knowledge::entity::Entity) -> Boolean { + let %2: (uuid: ::graph::types::knowledge::entity::EntityUuid, web: ::graph::types::principal::actor_group::web::WebId) + let %3: Boolean + let %4: (uuid: ::graph::types::knowledge::entity::EntityUuid, web: ::graph::types::principal::actor_group::web::WebId) + let %5: ::graph::types::principal::actor_group::web::WebId + let %6: ::graph::types::knowledge::entity::EntityUuid + + bb0(): { // postgres + %2 = (uuid: %1.metadata.record_id.entity_id.entity_uuid, web: %1.metadata.record_id.entity_id.web_id) + %6 = input LOAD u + %5 = input LOAD w + %4 = (uuid: %6, web: %5) + %3 = %2 == %4 + + return %3 + } +} + +thunk {thunk#7}() -> List<::graph::types::knowledge::entity::Entity> { + let %0: List<::graph::types::knowledge::entity::Entity> + let %1: () + let %2: ::graph::TimeAxis + + bb0(): { + %2 = input LOAD time_axis + %1 = () + + graph read entities(%2) + |> filter({graph::read::filter@7}, %1) + |> collect -> bb1(_) + } + + bb1(%0): { + return %0 + } +} \ No newline at end of file diff --git a/libs/@local/hashql/eval/tests/ui/postgres/struct-construction.jsonc b/libs/@local/hashql/eval/tests/ui/postgres/struct-construction.jsonc new file mode 100644 index 00000000000..5c761847652 --- /dev/null +++ b/libs/@local/hashql/eval/tests/ui/postgres/struct-construction.jsonc @@ -0,0 +1,20 @@ +//@ run: pass +//@ description: Struct aggregate from entity fields - AggregateKind::Struct → jsonb_build_object +// biome-ignore format: readability +["::graph::tail::collect", + ["::graph::body::filter", + ["::graph::head::entities", ["input", "time_axis", "_"]], + ["fn", { "#tuple": [] }, { "#struct": { "vertex": "_" } }, "_", + ["==", + { "#struct": { + "uuid": 
"vertex.metadata.record_id.entity_id.entity_uuid", + "web": "vertex.metadata.record_id.entity_id.web_id" + }}, + { "#struct": { + "uuid": ["input", "u", "::graph::types::knowledge::entity::EntityUuid"], + "web": ["input", "w", "::graph::types::principal::actor_group::web::WebId"] + }} + ] + ] + ] +] diff --git a/libs/@local/hashql/eval/tests/ui/postgres/struct-construction.stdout b/libs/@local/hashql/eval/tests/ui/postgres/struct-construction.stdout new file mode 100644 index 00000000000..95f2814ac7a --- /dev/null +++ b/libs/@local/hashql/eval/tests/ui/postgres/struct-construction.stdout @@ -0,0 +1,16 @@ +════ SQL ═══════════════════════════════════════════════════════════════════════ + +SELECT ("continuation_4_0"."row")."block" AS "continuation_4_0_block", ("continuation_4_0"."row")."locals" AS "continuation_4_0_locals", ("continuation_4_0"."row")."values" AS "continuation_4_0_values" +FROM "entity_temporal_metadata" AS "entity_temporal_metadata_0_0_0" +CROSS JOIN LATERAL (SELECT (ROW(jsonb_build_object($2, "entity_temporal_metadata_0_0_0"."entity_uuid", $3, "entity_temporal_metadata_0_0_0"."web_id") = jsonb_build_object($2, $4, $3, $5), NULL, NULL, NULL)::continuation) AS "row" +OFFSET 0) AS "continuation_4_0" +WHERE "entity_temporal_metadata_0_0_0"."transaction_time" && $0 AND "entity_temporal_metadata_0_0_0"."decision_time" && $1 AND ("continuation_4_0"."row")."filter" IS NOT FALSE + +════ Parameters ════════════════════════════════════════════════════════════════ + +$0: TemporalAxis(Transaction) +$1: TemporalAxis(Decision) +$2: Symbol(uuid) +$3: Symbol(web) +$4: Input(u) +$5: Input(w) diff --git a/libs/@local/hashql/eval/tests/ui/postgres/tuple-construction.aux.mir b/libs/@local/hashql/eval/tests/ui/postgres/tuple-construction.aux.mir new file mode 100644 index 00000000000..8a165d7b2fe --- /dev/null +++ b/libs/@local/hashql/eval/tests/ui/postgres/tuple-construction.aux.mir @@ -0,0 +1,80 @@ +thunk {thunk#1}() -> ::graph::TimeAxis { + let %0: ::graph::TimeAxis + 
+ bb0(): { + %0 = input LOAD time_axis + + return %0 + } +} + +thunk {thunk#2}() -> ::graph::types::knowledge::entity::EntityUuid { + let %0: ::graph::types::knowledge::entity::EntityUuid + + bb0(): { + %0 = input LOAD u + + return %0 + } +} + +thunk {thunk#3}() -> ::graph::types::principal::actor_group::web::WebId { + let %0: ::graph::types::principal::actor_group::web::WebId + + bb0(): { + %0 = input LOAD w + + return %0 + } +} + +thunk {thunk#5}() -> (::graph::types::knowledge::entity::EntityUuid, ::graph::types::principal::actor_group::web::WebId) { + let %0: (::graph::types::knowledge::entity::EntityUuid, ::graph::types::principal::actor_group::web::WebId) + let %1: ::graph::types::principal::actor_group::web::WebId + let %2: ::graph::types::knowledge::entity::EntityUuid + + bb0(): { + %2 = input LOAD u + %1 = input LOAD w + %0 = (%2, %1) + + return %0 + } +} + +fn {graph::read::filter@7}(%0: (), %1: ::graph::types::knowledge::entity::Entity) -> Boolean { + let %2: (::graph::types::knowledge::entity::EntityUuid, ::graph::types::principal::actor_group::web::WebId) + let %3: Boolean + let %4: (::graph::types::knowledge::entity::EntityUuid, ::graph::types::principal::actor_group::web::WebId) + let %5: ::graph::types::principal::actor_group::web::WebId + let %6: ::graph::types::knowledge::entity::EntityUuid + + bb0(): { // postgres + %2 = (%1.metadata.record_id.entity_id.entity_uuid, %1.metadata.record_id.entity_id.web_id) + %6 = input LOAD u + %5 = input LOAD w + %4 = (%6, %5) + %3 = %2 == %4 + + return %3 + } +} + +thunk {thunk#7}() -> List<::graph::types::knowledge::entity::Entity> { + let %0: List<::graph::types::knowledge::entity::Entity> + let %1: () + let %2: ::graph::TimeAxis + + bb0(): { + %2 = input LOAD time_axis + %1 = () + + graph read entities(%2) + |> filter({graph::read::filter@7}, %1) + |> collect -> bb1(_) + } + + bb1(%0): { + return %0 + } +} \ No newline at end of file diff --git 
a/libs/@local/hashql/eval/tests/ui/postgres/tuple-construction.jsonc b/libs/@local/hashql/eval/tests/ui/postgres/tuple-construction.jsonc new file mode 100644 index 00000000000..842e5cf1537 --- /dev/null +++ b/libs/@local/hashql/eval/tests/ui/postgres/tuple-construction.jsonc @@ -0,0 +1,20 @@ +//@ run: pass +//@ description: Tuple aggregate from entity fields - AggregateKind::Tuple → jsonb_build_array +// biome-ignore format: readability +["::graph::tail::collect", + ["::graph::body::filter", + ["::graph::head::entities", ["input", "time_axis", "_"]], + ["fn", { "#tuple": [] }, { "#struct": { "vertex": "_" } }, "_", + ["==", + { "#tuple": [ + "vertex.metadata.record_id.entity_id.entity_uuid", + "vertex.metadata.record_id.entity_id.web_id" + ]}, + { "#tuple": [ + ["input", "u", "::graph::types::knowledge::entity::EntityUuid"], + ["input", "w", "::graph::types::principal::actor_group::web::WebId"] + ]} + ] + ] + ] +] diff --git a/libs/@local/hashql/eval/tests/ui/postgres/tuple-construction.stdout b/libs/@local/hashql/eval/tests/ui/postgres/tuple-construction.stdout new file mode 100644 index 00000000000..99c19319d40 --- /dev/null +++ b/libs/@local/hashql/eval/tests/ui/postgres/tuple-construction.stdout @@ -0,0 +1,14 @@ +════ SQL ═══════════════════════════════════════════════════════════════════════ + +SELECT ("continuation_4_0"."row")."block" AS "continuation_4_0_block", ("continuation_4_0"."row")."locals" AS "continuation_4_0_locals", ("continuation_4_0"."row")."values" AS "continuation_4_0_values" +FROM "entity_temporal_metadata" AS "entity_temporal_metadata_0_0_0" +CROSS JOIN LATERAL (SELECT (ROW(jsonb_build_array("entity_temporal_metadata_0_0_0"."entity_uuid", "entity_temporal_metadata_0_0_0"."web_id") = jsonb_build_array($2, $3), NULL, NULL, NULL)::continuation) AS "row" +OFFSET 0) AS "continuation_4_0" +WHERE "entity_temporal_metadata_0_0_0"."transaction_time" && $0 AND "entity_temporal_metadata_0_0_0"."decision_time" && $1 AND 
("continuation_4_0"."row")."filter" IS NOT FALSE + +════ Parameters ════════════════════════════════════════════════════════════════ + +$0: TemporalAxis(Transaction) +$1: TemporalAxis(Decision) +$2: Input(u) +$3: Input(w) From 7d706db93f416ea990218ad7ee8c35a2592f53c8 Mon Sep 17 00:00:00 2001 From: Bilal Mahmoud Date: Sun, 8 Mar 2026 16:39:39 +0100 Subject: [PATCH 2/7] feat: move hunk up --- Cargo.lock | 3 +++ libs/@local/hashql/eval/src/error.rs | 3 +++ libs/@local/hashql/eval/src/lib.rs | 3 ++- 3 files changed, 8 insertions(+), 1 deletion(-) diff --git a/Cargo.lock b/Cargo.lock index 59fcffbd454..a55ea8bc8f4 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -3857,11 +3857,14 @@ name = "hashql-eval" version = "0.0.0" dependencies = [ "derive_more", + "hash-graph-postgres-store", "hash-graph-store", "hashql-compiletest", "hashql-core", "hashql-diagnostics", "hashql-hir", + "hashql-mir", + "insta", "simple-mermaid", "type-system", ] diff --git a/libs/@local/hashql/eval/src/error.rs b/libs/@local/hashql/eval/src/error.rs index 9ebf5fbb811..bdfc6b5e556 100644 --- a/libs/@local/hashql/eval/src/error.rs +++ b/libs/@local/hashql/eval/src/error.rs @@ -5,6 +5,7 @@ use hashql_diagnostics::{Diagnostic, DiagnosticIssues, Severity, category::Diagn #[cfg(feature = "graph")] use crate::graph::error::GraphCompilerDiagnosticCategory; +use crate::postgres::error::PostgresDiagnosticCategory; pub type EvalDiagnostic = Diagnostic; pub type EvalDiagnosticIssues = DiagnosticIssues; @@ -13,6 +14,7 @@ pub type EvalDiagnosticIssues = DiagnosticIssues Some(graph), + Self::Postgres(postgres) => Some(postgres), } } } diff --git a/libs/@local/hashql/eval/src/lib.rs b/libs/@local/hashql/eval/src/lib.rs index dce737dd692..f70c4022b28 100644 --- a/libs/@local/hashql/eval/src/lib.rs +++ b/libs/@local/hashql/eval/src/lib.rs @@ -19,10 +19,11 @@ )] extern crate alloc; - +pub mod context; pub mod error; #[cfg(feature = "graph")] pub mod graph; +pub mod postgres; #[cfg(test)] mod tests { From 
9fe823dc3e06a3ac9fbd2c2645a1b03f88c692f7 Mon Sep 17 00:00:00 2001 From: Bilal Mahmoud Date: Sun, 8 Mar 2026 16:55:19 +0100 Subject: [PATCH 3/7] fix: remove test plan --- libs/@local/hashql/eval/POSTGRES_TEST_PLAN.md | 605 ------------------ 1 file changed, 605 deletions(-) delete mode 100644 libs/@local/hashql/eval/POSTGRES_TEST_PLAN.md diff --git a/libs/@local/hashql/eval/POSTGRES_TEST_PLAN.md b/libs/@local/hashql/eval/POSTGRES_TEST_PLAN.md deleted file mode 100644 index d86ba5de79f..00000000000 --- a/libs/@local/hashql/eval/POSTGRES_TEST_PLAN.md +++ /dev/null @@ -1,605 +0,0 @@ -# PostgreSQL Evaluator Test Plan - -Comprehensive test plan for `libs/@local/hashql/eval/src/postgres/` — the CFG-to-SQL lowering -module that compiles MIR islands into PostgreSQL `SELECT` statements. - -**Status:** Tier 1 compiletest coverage complete (21 tests); remaining Tier 1 tests blocked on -missing HIR features. Tier 3 MIR builder snapshot tests mostly complete (15/17 tests). Tier 2 -unit tests not yet started. - -**Legend:** ✅ done · ⏭ skipped (reason noted) · 📸 needs snapshot test · ❌ not started - ---- - -## Tier 1: Compiletest Suite — `eval/postgres/entity` - -End-to-end tests: J-Expr → AST → HIR → MIR → execution analysis → `PostgresCompiler::compile()` -→ transpiled SQL string. - -### Suite implementation - -New file: `compiletest/src/suite/eval_postgres_entity.rs` - -Pipeline: - -1. Reuse `mir_reify::mir_reify()` to get `(root_def, bodies)` -2. Run the standard MIR optimization pipeline (reuse whatever shared pipeline helper exists, - or centralize one — do NOT manually list individual passes, as that will drift) -3. Run `ExecutionAnalysis` on graph read filter bodies → `IslandGraph` -4. Build `EvalContext::new_in()` (computes `LiveOut` automatically) -5. Walk root body to find `GraphRead` terminators -6. Call `PostgresCompiler::compile(graph_read)` -7. 
Output `statement.transpile_to_string()` + parameter summary - -Test location: `eval/tests/ui/postgres/entity/` -Spec: `suite = "eval/postgres/entity"` - -Output format (in `.stdout`): - -``` -════ SQL ════════════════════════════════════════════════════════════════════════ - -SELECT ... -FROM ... -WHERE ... - -════ Parameters ════════════════════════════════════════════════════════════════ - -$1: TemporalAxis(Transaction) -$2: TemporalAxis(Decision) -$3: Symbol("entity_uuid") -... -``` - -**Harness note — parameter summary:** `Parameters`' internal `reverse` mapping uses a private -enum. To output the parameter summary, either add a `pub fn debug_summary(&self) -> String` -behind a `#[cfg(test)]` or a dedicated feature gate, or reduce output to just parameter count -and indices. Decide during implementation. - -**Snapshot stability:** avoid asserting specific `DefId`/`IslandId` numbers in continuation -aliases (e.g. `continuation_0_0`). These depend on upstream lowering/inline decisions. Assert -patterns instead: aliases start with `continuation_`, SELECT includes `*_block`, `*_locals`, -`*_values` columns. - -### Tests — basic control flow - -Note: constant expressions (`if true then ...`) are folded away by the MIR optimization -pipeline (inst_simplify, forward_substitution, DCE, CFG simplification) before reaching the -postgres compiler. To test control flow, use `[input ...]` parameters — they're opaque to -the optimizer and force runtime branching. 
- -#### `constant-true-filter` ✅ - -- **run:** pass -- **input:** filter body that returns literal `true` (after optimization, the entire filter - body is a single-block `return true`) -- **tests:** simplest possible compilation — no branching, straight-line `Return` continuation, - temporal conditions on base table, continuation LATERAL with OFFSET 0 -- **verifies:** baseline SQL shape, continuation composite structure `(filter, NULL, NULL, NULL)::continuation` - -#### `if-input-branches` ✅ - -- **run:** pass -- **input:** `if [input foo] then else ` where `foo` is a boolean input - parameter and both branches are distinct runtime expressions -- **tests:** `SwitchInt` → `CASE WHEN discriminant = THEN ... ELSE ... END` structure, - both branches produce continuations, input parameter appears as `$N` -- **verifies:** CASE tree generation, branch ordering matches `SwitchTargets`, input parameter - compilation - -#### `nested-if-input-branches` ✅ - -- **run:** pass -- **input:** nested `if` with input discriminants: - `if [input foo] then (if [input bar] then else ) else ` -- **tests:** nested `CASE WHEN` expressions — inner CASE as result of outer CASE branch -- **verifies:** stack-based compilation produces correctly nested SQL, snapshot/rollback of - locals across branches - -### Tests — entity path access & comparisons - -#### `entity-uuid-equality` ✅ - -- **run:** pass -- **input:** `vertex.id.entity_id.entity_uuid == ` -- **tests:** entity path resolution for `EntityUuid`, equality comparison (no cast), primitive - parameter for the UUID string -- **verifies:** `EntityPath::EntityUuid` → correct column reference on `entity_temporal_metadata`, - comparison operators don't add unnecessary casts - -#### `entity-web-id-equality` ✅ - -- **run:** pass -- **input:** `vertex.id.entity_id.web_id == ` -- **tests:** entity path resolution for `WebId` -- **verifies:** `EntityPath::WebId` → correct column reference - -#### `entity-draft-id-equality` ✅ - -- **run:** pass -- 
**input:** `vertex.id.draft_id == ` -- **tests:** entity path resolution for `DraftId` -- **verifies:** `EntityPath::DraftId` → correct column reference -- **note:** execution analysis places the equality comparison in the interpreter, not in SQL. - The test still exercises DraftId path resolution via the provides set (column appears in - SELECT), but the `==` itself is an island exit, not an in-SQL comparison. - -#### `entity-archived-check` ✅ - -- **run:** pass -- **input:** filter on `vertex.metadata.archived == false` -- **tests:** entity path resolution for `Archived`, which requires `entity_editions` join -- **verifies:** `EntityPath::Archived` triggers `entity_editions` JOIN in FROM clause -- **note:** optimizer simplifies `archived == false` to `NOT(archived)`, which is fine — still - exercises the editions join and correct column reference. - -### Tests — property access - -#### `property-field-equality` ⏭ 📸 - -- **run:** skip -- **skip reason:** property subscript requires a concrete entity type; the type system cannot - resolve unknown field access on `vertex.properties` yet -- **input:** `vertex.properties. == "value"` -- **tests:** `EntityPath::Properties` → `entity_editions` join, property field access generates - `json_extract_path(properties, $key::text)`, equality comparison on jsonb -- **verifies:** json_extract_path chain, property access triggers entity_editions join, - parameter for field name symbol -- **covered by:** Tier 3 `property_field_equality` snapshot test - -#### `nested-property-access` ⏭ 📸 - -- **run:** skip -- **skip reason:** property subscript requires a concrete entity type (same as - `property-field-equality`) -- **input:** `vertex.properties.. 
== "value"` -- **tests:** multi-level property access → `json_extract_path(properties, $key1::text, $key2::text)` -- **verifies:** projection chain accumulates all indices into single json_extract_path call -- **covered by:** Tier 3 `nested_property_access` snapshot test - -### Tests — arithmetic & type casts - -The code casts differently depending on operator category: - -- **Arithmetic** (`Add`, `Sub`): both operands cast to `::numeric` -- **Bitwise** (`BitAnd`, `BitOr`): both operands cast to `::bigint` -- **Comparison** (`Eq`, `Ne`, `Lt`, `Lte`, `Gt`, `Gte`): **no cast** — operates on jsonb directly - -#### `comparison-no-cast` ✅ - -- **run:** pass -- **input:** `[input x] > [input y]` (input parameters, not properties — HIR lacks arithmetic - intrinsics so property access isn't needed; inputs exercise the same cast logic) -- **tests:** `BinOp::Gt` → `BinaryOperator::Greater` with **no** type casts on either operand -- **verifies:** comparison operators do not add unnecessary casts - -#### `arithmetic-addition-casts` ⏭ 📸 - -- **run:** skip -- **skip reason:** `::core::math::add` intrinsic not supported in HIR specialization yet - (H-4728) -- **input:** `[input x] + [input y] > 0` -- **tests:** `BinOp::Add` → `BinaryOperator::Add` with `::numeric` casts on both operands, - the result then compared with `>` (which itself does NOT cast) -- **verifies:** arithmetic ops cast to numeric, comparison on arithmetic result works, - correct nesting of cast vs. non-cast expressions -- **note:** addition exists in MIR (`bin.+`) but cannot be produced from J-Expr. Needs a - Tier 3 MIR builder snapshot test to exercise this code path. - -### Tests — boolean logic - -All boolean logic tests use input parameters as operands to prevent constant folding. - -Note: `logical-and-inputs` primarily verifies that `&&` desugaring survives optimization and -produces a CASE in SQL. If `if-input-branches` already covers CASE generation sufficiently, -one of these two can be dropped. 
Keep at least one AND/OR test as a smoke test for the -desugaring → SQL path. - -#### `logical-and-inputs` ✅ - -- **run:** pass -- **input:** `[input a] && [input b]` (two input parameters combined with AND) -- **tests:** `&&` desugars to `if a then b else false` — since `a` is a runtime input, the - branch survives optimization and produces a `CASE WHEN` in SQL -- **verifies:** short-circuit AND compiles to correct CASE structure -- **note:** execution analysis places both branches as island exits (the `b` load and the - literal `false` both transfer control to the interpreter). The test exercises island exit - from a SwitchInt, but does not produce a CASE tree with filter return branches. The - discriminant (`input a`) is evaluated in Postgres. - -#### `logical-not-input` ⏭ 📸 - -- **run:** skip -- **skip reason:** `::core::bool::not` intrinsic not supported in HIR specialization yet - (H-4729) -- **input:** `! [input a]` (negation of input parameter) -- **tests:** `UnOp::Not` → `UnaryOperator::Not` applied to an input parameter `$N` -- **verifies:** unary NOT in SQL output -- **note:** unary NOT exists in MIR (`un.!`) but cannot be produced from J-Expr. Needs a - Tier 3 MIR builder snapshot test to exercise this code path. 
- -### Tests — input parameters & environment - -#### `input-parameter-load` ✅ - -- **run:** pass -- **input:** filter comparing entity field to a user-provided input parameter -- **tests:** `RValue::Input(InputOp::Load)` → parameter `$N`, parameter deduplication - (same input referenced twice → same `$N`) -- **verifies:** input parameters allocated correctly, dedup works - -#### `input-parameter-exists` ✅ - -- **run:** pass -- **input:** optional input parameter with default: `[input "optional_flag" Boolean true]` -- **tests:** optional input desugars to `if exists(flag) then load(flag) else default` -- **verifies:** `InputOp::Exists` appears in MIR; execution analysis places the SwitchInt - and both branches in the interpreter via island exit -- **note:** the `InputOp::Exists` → `NOT IS NULL` SQL pattern is NOT exercised because the - exists check is placed in the interpreter, not in the Postgres island. The test verifies - optional input desugaring + island exit behavior. `NOT IS NULL` needs a Tier 3 test. - -#### `env-captured-variable` ✅ - -- **run:** pass -- **input:** filter referencing an input from outer scope (captured in closure environment); - wrapped in `if true then ... 
else ...` to prevent thunk conversion + subsequent inlining - from eliminating the env access -- **tests:** environment field access → `env.` → parameter `$N` via `db.parameters.env()` -- **verifies:** captured variables become `Env(#N)` parameters, field projection on env works - -### Tests — aggregate construction - -#### `struct-construction` ✅ - -- **run:** pass -- **input:** filter that constructs a struct value (may appear in intermediate computations) -- **tests:** `AggregateKind::Struct` → `jsonb_build_object(key1, val1, key2, val2)` -- **verifies:** struct field names become symbol parameters, values are compiled operands - -#### `tuple-construction` ✅ - -- **run:** pass -- **input:** filter that constructs a tuple value -- **tests:** `AggregateKind::Tuple` → `jsonb_build_array(val1, val2, ...)` -- **verifies:** tuple elements become jsonb_build_array arguments - -#### `list-construction` ✅ - -- **run:** pass -- **input:** filter that constructs a list value -- **tests:** `AggregateKind::List` → `jsonb_build_array(val1, val2, ...)` -- **verifies:** list and tuple use the same lowering (`jsonb_build_array`) but are distinct - code paths — ensures the `List` match arm works - -#### `dict-construction` ✅ - -- **run:** pass -- **input:** filter that constructs a dict/map value -- **tests:** `AggregateKind::Dict` → `jsonb_build_object(k1, v1, k2, v2)` with operands - consumed in pairs via `array_chunks()` -- **verifies:** key-value pairing logic (the `operands.len() % 2 == 0` invariant and the - chunked iteration) - -#### `opaque-passthrough` ✅ - -- **run:** pass -- **input:** filter involving an opaque type wrapper (e.g. 
`EntityUuid(Uuid(...))`) -- **tests:** `AggregateKind::Opaque` → passes through the single inner operand unchanged -- **verifies:** opaque wrapper is transparent in SQL — no wrapping function, just the inner - expression - -#### `let-binding-propagation` ✅ - -- **run:** pass -- **input:** filter with let-bindings referencing input parameters, used in the filter condition - (e.g. `let x = [input foo] in vertex.properties.field == x`) -- **tests:** locals map tracks intermediate values correctly through compilation — the let-bound - local holds an input parameter expression, which is then used in a comparison -- **verifies:** let-bound values propagate correctly through the local → expression mapping, - input parameter deduplication still works across let-bindings - -### Tests — relationship / edge entity fields - -#### `left-entity-filter` ⏭ 📸 - -- **run:** skip -- **skip reason:** `link_data` is `Option`; accessing fields through Option requires - unwrap/pattern-match not yet expressible in filter J-Expr -- **input:** filter on `vertex.link_data.left_entity_id.entity_uuid == [input id]` -- **tests:** `EntityPath::LeftEntityUuid` → LEFT OUTER JOIN on `entity_has_left_entity`, - correct column reference -- **verifies:** edge/relationship fields trigger the correct join type (LEFT OUTER, not INNER) -- **covered by:** Tier 3 `left_entity_filter` snapshot test - -### Tests — multi-source "kitchen sink" - -#### `mixed-sources-filter` ✅ - -- **run:** pass -- **input:** two-filter pipeline: first checks `vertex.metadata.archived == false` (editions - join + primitive), second checks `vertex.entity_uuid == env_uuid` (env capture via - `if true then ...` anti-inlining wrapper) -- **tests:** exercises multiple parameter categories simultaneously: `TemporalAxis` (always - present), `Env` (captured variable); also exercises entity_editions join (archived), - two continuation laterals, two WHERE conditions -- **verifies:** parameter categories coexist, multiple join types in one 
query, correct - WHERE composition (temporal + continuation filters) - -### Tests — join planning (provides-driven SELECT) - -#### `minimal-select-no-extra-joins` ✅ - -- **run:** pass -- **input:** filter that only accesses temporal metadata fields (web_id, entity_uuid) -- **tests:** only base table (`entity_temporal_metadata`) in FROM, no unnecessary joins -- **verifies:** lazy join planning: unused tables are not joined - -#### `properties-triggers-editions-join` ⏭ 📸 - -- **run:** skip -- **skip reason:** `vertex.properties` is generic `T` and requires a concrete entity type; - same fundamental limitation as `property-field-equality`. The editions join IS tested - indirectly by `entity-archived-check` which accesses `EntityPath::Archived`. -- **input:** filter accessing `vertex.properties` -- **tests:** `entity_editions` JOIN appears in FROM clause -- **verifies:** `EntityPath::Properties` correctly triggers the editions join -- **covered by:** Tier 3 `property_field_equality` snapshot test (editions join visible in - the `entity_editions_0_0_1` table alias) - -#### `entity-type-ids-lateral` ✅ - -- **run:** pass -- **input:** query that requires entity type IDs in output -- **tests:** LEFT JOIN LATERAL subquery for `entity_is_of_type_ids` with unnest + jsonb_agg -- **verifies:** computed column via lateral join, correct correlation condition on edition_id - -### Tests — query structure - -Note: temporal conditions (`&&` overlap on transaction_time and decision_time) are unconditional -— they appear in every compiled query. Rather than a standalone test, verify their presence in -the baseline `constant-true-filter` snapshot and spot-check in others. - -#### `property-mask` ❌ 📸 - -- **run:** pass (requires suite directive to inject mask, since mask comes from permission - system, not the query) -- **input:** query where `Properties` and `PropertyMetadata` are in the provides set (i.e. 
- they appear in SELECT because the interpreter needs them back, not just because the filter - references them) -- **suite directive:** `property_mask = true` (or similar) to inject a mask expression -- **tests:** `properties` and `property_metadata` SELECT expressions wrapped as `(col - mask)`, - other JSON expressions (e.g. `RecordId`'s `jsonb_build_object`) are NOT masked -- **verifies:** property mask applies only to property columns in the SELECT list, not to - filter-internal property references -- **covered by:** Tier 3 `property_mask` snapshot test (calls `with_property_mask()` directly) - -#### `multiple-filters` ✅ - -- **run:** pass -- **input:** graph read with two separate filter bodies -- **tests:** two CROSS JOIN LATERAL subqueries in FROM, each with OFFSET 0, two continuation - aliases (names start with `continuation_`), WHERE includes both `IS NOT FALSE` conditions, - SELECT decomposes both continuations with `*_block`, `*_locals`, `*_values` columns -- **verifies:** multi-filter compilation, correct SELECT column decomposition for both; - assert alias patterns rather than specific numeric ids - -### Non-goal: error diagnostics via compiletest - -All postgres diagnostics are `Severity::Bug` — they represent internal invariant violations -(closures, nested graph reads, function pointers, etc. placed into a Postgres island). The -placement pass would never produce these MIR shapes, so there is no valid J-Expr input that -triggers them through the full pipeline. These are tested as unit tests in Tier 2 (`error.rs`) -and optionally via hand-crafted MIR in Tier 3. - ---- - -## Tier 2: Unit Tests - -Standard `#[cfg(test)] mod tests` in the source files. Tests that would be tautological -(restating match arms, asserting structural constants) were dropped. 
- -### `parameters.rs` — deduplication ✅ (6 tests) - -Tests parameter deduplication and category isolation: same input → same index, different -inputs → different indices, cross-category isolation (`Input("x")` vs `Symbol("x")`), -temporal axis stability, and env field dedup. - -### `continuation.rs` — naming ✅ (2 tests) - -Tests continuation alias naming and field identifier construction. - -### `traverse.rs`, `projections.rs`, `error.rs` ⏭ - -Dropped — the entity path → SQL column mapping, lazy join planning, and diagnostic -constructors are exercised transitively through the Tier 1 compiletest suite and Tier 3 -snapshot tests with sufficient coverage. Standalone unit tests for these would either restate -match arms (traverse), assert structural constants (continuation column names), or test -unreachable error paths (diagnostics). - ---- - -## Tier 3: MIR Builder Snapshot Tests - -Programmatic MIR via `body!` macro, compiled through the real execution analysis pipeline. -These test MIR constructs that exist in the compiler but cannot yet be produced from J-Expr — -either because the HIR specialization phase doesn't support the intrinsic (e.g. arithmetic, -unary NOT) or because the type system can't resolve the access yet (e.g. property field -subscripts on generic entity types). - -Test location: `eval/src/postgres/filter/tests.rs` -Snapshots: `eval/tests/ui/postgres/filter/` - -### Shared test harness - -A `Fixture` struct that: - -1. Takes a `body!`-constructed MIR body with `Source::GraphReadFilter` -2. Runs `SizeEstimationAnalysis` + `ExecutionAnalysis::run_all_in` (the public API) to - compute island boundaries via the real solver -3. 
Stores bodies and execution residuals for compilation - -Two compile helpers: - -- `compile_filter_islands()` — compiles each Postgres exec island via - `GraphReadFilterCompiler::compile_body()`, returns per-island SQL expressions -- `compile_full_query()` — synthesizes a `GraphRead` and calls - `PostgresCompiler::compile()`, returns full SELECT + parameters - -**Island boundary control:** the solver decides placement based on cost. To force a -Postgres→Interpreter boundary, bb0 must accumulate enough transfer cost to exceed the P→I -switch cost (8). Use heavy entity path loads (properties, composites like RecordId, -TemporalVersioning) in bb0, and an `apply` in bb1 to force Interpreter. Lightweight paths -(single UUIDs) are insufficient because block splitting fragments the body. - -### Tests — data islands & provides integration - -#### `data_island_provides_without_lateral` ✅ - -- **body:** island graph where a non-Postgres island requires Postgres-origin traversal paths, - causing the resolver to insert a Postgres `IslandKind::Data` island -- **tests:** the data island contributes output columns to `provides` (so they appear in the - SELECT list with correct joins) but does NOT generate a continuation LATERAL subquery -- **verifies:** `compile_graph_read_filter_island()` returns `None` for data islands, - `provides.insert(island.provides())` still runs, no spurious CROSS JOIN LATERAL - -#### `provides_drives_select_and_joins` ✅ - -- **body:** entity path loads (EntityUuid, Archived) in bb0, apply in bb1 forces Interpreter; - uses `compile_full_query()` to exercise the full `PostgresCompiler::compile()` path -- **tests:** SELECT list includes provided paths with correct joins, continuation LATERAL - appears, parameter summary shows temporal axes and symbols -- **verifies:** end-to-end provides → traverse → projections → build_from integration - -### Tests — control flow edge cases - -#### `island_exit_goto` ✅ - -- **body:** heavy entity path loads (properties, 
composites) in bb0, apply in bb1 -- **tests:** `Goto` crossing island boundary → `Continuation::IslandExit` -- **verifies:** continuation has correct `block` id, `locals` array, `values` array, all - cast to `::continuation` - -#### `island_exit_with_live_out` ✅ - -- **body:** heavy entity path loads + input in bb0, apply in bb1; input is live-out -- **tests:** island exit captures both block parameters AND remaining live-out locals -- **verifies:** `locals` array contains block param ids first, then live-out local ids; - `values` array has corresponding expressions in same order - -#### `island_exit_switch_int` ✅ - -- **body:** heavy entity path loads + SwitchInt in bb0; bb1 returns, bb2 has apply -- **tests:** one CASE branch produces a `Return` continuation, the other produces an - `IslandExit` continuation -- **verifies:** mixed continuation types within a single CASE tree — one branch has - `(filter, NULL, NULL, NULL)`, the other has `(NULL, block, locals[], values[])` - -#### `diamond_cfg_merge` ✅ - -- **body:** bb0 branches (SwitchInt on input) to bb1 and bb2, both goto bb3 which returns; - all blocks in Postgres island -- **tests:** diamond CFG entirely within one island — both branches converge -- **verifies:** CASE with two branches, locals snapshot/rollback works correctly across - the diamond (bb1's local changes don't leak into bb2's compilation) - -#### `switch_int_many_branches` ✅ - -- **body:** SwitchInt on input with 4 value targets + otherwise -- **tests:** multi-way branch → CASE with 4 WHEN clauses + ELSE -- **verifies:** correct number of WHEN clauses in correct order, otherwise maps to ELSE - -#### `straight_line_goto_chain` ✅ - -- **body:** bb0 → bb1 → bb2 → return, all within Postgres island, with block parameters - passed at each goto via inputs -- **tests:** goto fast-path (no snapshot/rollback needed for linear chains), block parameter - assignment at each step -- **verifies:** gotos within island are followed directly without CASE, 
locals accumulate - correctly through the chain - -#### `island_exit_empty_arrays` ✅ - -- **body:** heavy entity path loads in bb0, apply in bb1; no locals from bb0 used by bb1 -- **tests:** continuation with empty `locals` and `values` arrays -- **verifies:** `ARRAY[]::int[]` and `ARRAY[]::jsonb[]` transpile correctly (edge case for - empty array literals with type cast) - -### Tests — projection kinds - -#### `field_index_projection` ✅ - -- **body:** tuple aggregate followed by `.0` numeric field projection -- **tests:** `ProjectionKind::Field(FieldIndex)` → `json_extract_path(base, (0)::text)` -- **verifies:** numeric field indices are cast to `::text` for json_extract_path - -#### `dynamic_index_projection` ✅ - -- **body:** list with Index projection where the key comes from an input (uses fluent builder - since `body!` doesn't support `ProjectionKind::Index`) -- **tests:** `ProjectionKind::Index(local)` → `json_extract_path(base, (local_expr)::text)` -- **verifies:** dynamic index expression is grouped and cast to `::text`, not confused with - static field names - -#### `field_by_name_projection` ✅ - -- **body:** struct field access using `ProjectionKind::FieldByName(symbol)` -- **tests:** symbol allocated as parameter, cast to `::text` for json_extract_path -- **verifies:** named field access uses `db.parameters.symbol()` and correct text cast - -### Tests — operator coverage - -These ensure all operator branches produce correct SQL with correct casts. - -**Priority:** these are the primary path for testing arithmetic and unary operators, since the -HIR specialization phase does not yet support `::core::math::*` (H-4728) or -`::core::bool::not` (H-4729) intrinsics. The Tier 1 compiletest tests for these operators are -skipped until the HIR catches up. 
- -#### `unary_neg` ✅ - -- **body:** `UnOp::Neg` applied to an input local -- **tests:** `UnaryOperator::Negate` in SQL output -- **verifies:** negation operator emits correctly - -#### `unary_not` ✅ - -- **body:** `UnOp::Not` applied to an input local -- **tests:** `UnaryOperator::Not` in SQL output -- **verifies:** logical NOT emits correctly - -#### `unary_bitnot` ✅ - -- **body:** `UnOp::BitNot` applied to a local -- **tests:** `UnaryOperator::BitwiseNot` in SQL output -- **verifies:** bitwise NOT emits correctly - -#### `binary_sub_numeric_cast` ✅ - -- **body:** `BinOp::Sub` on two input locals -- **tests:** `BinaryOperator::Subtract` with `::numeric` casts on both operands -- **verifies:** subtraction uses same cast logic as addition - -#### `binary_bitand_bigint_cast` ✅ - -- **body:** `BinOp::BitAnd` on two input locals -- **tests:** `BinaryOperator::BitwiseAnd` with `::bigint` casts on both operands -- **verifies:** bitwise ops use `bigint` cast (not `numeric`) - -### Tests — error diagnostics ⏭ - -All postgres diagnostics are `Severity::Bug` — internal invariant violations (closures, nested -graph reads, function pointers, etc. placed into a Postgres island). The public API -(`ExecutionAnalysis`) prevents invalid MIR from reaching the compiler: the placement solver -never assigns `Apply`, `Closure`, `FnPtr`, `GraphRead`, or projected assignments to Postgres -islands. These code paths are unreachable by construction, so testing them would require -bypassing the public API to hand-construct invalid island contents — which tests the test -harness, not the compiler. - ---- - -## Remaining Work - -1. **Tier 1 blocked on HIR:** `arithmetic-addition-casts` (H-4728). All other blocked Tier 1 - tests (`property-field-equality`, `nested-property-access`, `properties-triggers-editions-join`, - `logical-not-input`, `left-entity-filter`, `property-mask`) are now covered by snapshot tests. 
From 72ec1873c66a8639946a37b1d385a294372b3b5f Mon Sep 17 00:00:00 2001 From: Bilal Mahmoud Date: Sun, 8 Mar 2026 17:56:07 +0100 Subject: [PATCH 4/7] fix: suggestions from code review --- apps/hash-graph/docs/dependency-diagram.mmd | 6 +- .../rust/docs/dependency-diagram.mmd | 6 +- libs/@local/codec/docs/dependency-diagram.mmd | 6 +- .../codegen/docs/dependency-diagram.mmd | 6 +- .../graph/api/docs/dependency-diagram.mmd | 6 +- .../authorization/docs/dependency-diagram.mmd | 6 +- .../docs/dependency-diagram.mmd | 25 +++- .../migrations/docs/dependency-diagram.mmd | 34 +++-- .../docs/dependency-diagram.mmd | 56 +++++--- .../postgres/query/expression/conditional.rs | 2 + .../graph/store/docs/dependency-diagram.mmd | 6 +- .../docs/dependency-diagram.mmd | 6 +- .../test-server/docs/dependency-diagram.mmd | 6 +- .../graph/types/docs/dependency-diagram.mmd | 6 +- .../validation/docs/dependency-diagram.mmd | 46 ++++--- .../harpc/types/docs/dependency-diagram.mmd | 6 +- .../wire-protocol/docs/dependency-diagram.mmd | 6 +- .../hashql/ast/docs/dependency-diagram.mmd | 124 ++++++++++-------- .../compiletest/docs/dependency-diagram.mmd | 124 ++++++++++-------- .../hashql/core/docs/dependency-diagram.mmd | 3 +- .../diagnostics/docs/dependency-diagram.mmd | 3 +- .../hashql/eval/docs/dependency-diagram.mmd | 124 ++++++++++-------- libs/@local/hashql/eval/package.json | 4 +- .../hashql/eval/src/postgres/filter/mod.rs | 7 +- .../hashql/eval/src/postgres/filter/tests.rs | 2 +- libs/@local/hashql/eval/src/postgres/mod.rs | 9 ++ .../hashql/eval/src/postgres/traverse.rs | 5 +- .../ui/postgres/comparison-no-cast.stdout | 2 +- .../ui/postgres/constant-true-filter.stdout | 2 +- .../ui/postgres/dict-construction.stdout | 2 +- .../ui/postgres/entity-archived-check.stdout | 2 +- .../postgres/entity-type-ids-lateral.stdout | 2 +- .../ui/postgres/entity-uuid-equality.stdout | 2 +- .../ui/postgres/entity-web-id-equality.stdout | 2 +- .../ui/postgres/env-captured-variable.stdout | 2 +- 
.../filter/binary_bitand_bigint_cast.snap | 2 +- .../filter/binary_sub_numeric_cast.snap | 2 +- .../ui/postgres/filter/diamond_cfg_merge.snap | 2 +- .../filter/dynamic_index_projection.snap | 2 +- .../filter/field_by_name_projection.snap | 2 +- .../filter/field_index_projection.snap | 2 +- .../filter/island_exit_switch_int.snap | 2 +- .../postgres/filter/left_entity_filter.snap | 2 +- .../filter/nested_property_access.snap | 2 +- .../filter/property_field_equality.snap | 2 +- .../filter/straight_line_goto_chain.snap | 2 +- .../filter/switch_int_many_branches.snap | 2 +- .../ui/postgres/filter/unary_bitnot.snap | 2 +- .../tests/ui/postgres/filter/unary_neg.snap | 2 +- .../tests/ui/postgres/filter/unary_not.snap | 2 +- .../ui/postgres/if-input-branches.stdout | 2 +- .../ui/postgres/input-parameter-exists.stdout | 2 +- .../ui/postgres/input-parameter-load.stdout | 2 +- .../postgres/let-binding-propagation.stdout | 2 +- .../ui/postgres/list-construction.stdout | 2 +- .../ui/postgres/logical-and-inputs.stdout | 2 +- .../minimal-select-no-extra-joins.stdout | 2 +- .../ui/postgres/mixed-sources-filter.stdout | 4 +- .../tests/ui/postgres/multiple-filters.stdout | 4 +- .../postgres/nested-if-input-branches.stdout | 2 +- .../ui/postgres/opaque-passthrough.stdout | 2 +- .../ui/postgres/struct-construction.stdout | 2 +- .../ui/postgres/tuple-construction.stdout | 2 +- .../hashql/hir/docs/dependency-diagram.mmd | 124 ++++++++++-------- .../hashql/mir/docs/dependency-diagram.mmd | 124 ++++++++++-------- .../syntax-jexpr/docs/dependency-diagram.mmd | 124 ++++++++++-------- .../status/rust/docs/dependency-diagram.mmd | 33 +++-- .../telemetry/docs/dependency-diagram.mmd | 40 ++++-- .../docs/dependency-diagram.mmd | 6 +- .../rust/docs/dependency-diagram.mmd | 6 +- 70 files changed, 665 insertions(+), 508 deletions(-) diff --git a/apps/hash-graph/docs/dependency-diagram.mmd b/apps/hash-graph/docs/dependency-diagram.mmd index 7b648492b71..f9c26a72305 100644 --- 
a/apps/hash-graph/docs/dependency-diagram.mmd +++ b/apps/hash-graph/docs/dependency-diagram.mmd @@ -55,7 +55,6 @@ graph TD 1 -.-> 40 2 -.-> 3 2 --> 22 - 4 --> 8 4 --> 11 4 --> 12 4 --> 18 @@ -91,15 +90,14 @@ graph TD 22 --> 39 23 -.-> 24 24 --> 27 - 24 --> 30 24 --> 31 24 --> 39 25 --> 2 25 --> 26 25 --> 29 25 -.-> 36 - 27 --> 9 - 27 --> 28 + 27 --> 8 + 27 --> 30 28 -.-> 24 30 --> 28 31 --> 23 diff --git a/libs/@blockprotocol/type-system/rust/docs/dependency-diagram.mmd b/libs/@blockprotocol/type-system/rust/docs/dependency-diagram.mmd index 3ed38b0d71c..cee946bdefb 100644 --- a/libs/@blockprotocol/type-system/rust/docs/dependency-diagram.mmd +++ b/libs/@blockprotocol/type-system/rust/docs/dependency-diagram.mmd @@ -40,7 +40,6 @@ graph TD 1 -.-> 25 2 -.-> 3 2 --> 14 - 4 --> 6 4 --> 9 4 --> 12 4 --> 17 @@ -61,11 +60,10 @@ graph TD 14 --> 22 15 -.-> 16 16 --> 17 - 16 --> 19 16 --> 20 16 --> 22 - 17 --> 7 - 17 --> 18 + 17 --> 6 + 17 --> 19 18 -.-> 16 19 --> 18 20 --> 15 diff --git a/libs/@local/codec/docs/dependency-diagram.mmd b/libs/@local/codec/docs/dependency-diagram.mmd index a72ee71838c..b2c62ea4a08 100644 --- a/libs/@local/codec/docs/dependency-diagram.mmd +++ b/libs/@local/codec/docs/dependency-diagram.mmd @@ -45,7 +45,6 @@ graph TD 1 -.-> 30 2 -.-> 3 2 --> 18 - 4 --> 6 4 --> 9 4 --> 14 4 --> 22 @@ -71,12 +70,11 @@ graph TD 18 --> 27 19 -.-> 20 20 --> 22 - 20 --> 24 20 --> 25 20 --> 27 21 --> 2 - 22 --> 7 - 22 --> 23 + 22 --> 6 + 22 --> 24 23 -.-> 20 24 --> 23 25 --> 19 diff --git a/libs/@local/codegen/docs/dependency-diagram.mmd b/libs/@local/codegen/docs/dependency-diagram.mmd index 2b981d206a5..601be9ff4ba 100644 --- a/libs/@local/codegen/docs/dependency-diagram.mmd +++ b/libs/@local/codegen/docs/dependency-diagram.mmd @@ -41,7 +41,6 @@ graph TD 1 --> 8 1 -.-> 27 2 -.-> 3 - 4 --> 6 4 --> 9 4 --> 14 4 --> 20 @@ -64,11 +63,10 @@ graph TD 16 --> 13 17 -.-> 18 18 --> 20 - 18 --> 22 18 --> 23 19 --> 2 - 20 --> 7 - 20 --> 21 + 20 --> 6 + 20 --> 22 21 -.-> 18 22 
--> 21 23 --> 17 diff --git a/libs/@local/graph/api/docs/dependency-diagram.mmd b/libs/@local/graph/api/docs/dependency-diagram.mmd index 8c732a72bd1..4eee95b4a8b 100644 --- a/libs/@local/graph/api/docs/dependency-diagram.mmd +++ b/libs/@local/graph/api/docs/dependency-diagram.mmd @@ -56,7 +56,6 @@ graph TD 1 -.-> 41 2 -.-> 3 2 --> 22 - 4 --> 8 4 --> 11 4 --> 12 4 --> 18 @@ -92,15 +91,14 @@ graph TD 22 --> 39 23 -.-> 24 24 --> 27 - 24 --> 30 24 --> 31 24 --> 39 25 --> 2 25 --> 26 25 --> 29 25 -.-> 36 - 27 --> 9 - 27 --> 28 + 27 --> 8 + 27 --> 30 28 -.-> 24 30 --> 28 31 --> 23 diff --git a/libs/@local/graph/authorization/docs/dependency-diagram.mmd b/libs/@local/graph/authorization/docs/dependency-diagram.mmd index 2e6aa888d26..a57506ba2cc 100644 --- a/libs/@local/graph/authorization/docs/dependency-diagram.mmd +++ b/libs/@local/graph/authorization/docs/dependency-diagram.mmd @@ -40,7 +40,6 @@ graph TD 1 -.-> 25 2 -.-> 3 2 --> 14 - 4 --> 6 4 --> 9 4 --> 12 4 --> 17 @@ -61,11 +60,10 @@ graph TD 14 --> 22 15 -.-> 16 16 --> 17 - 16 --> 19 16 --> 20 16 --> 22 - 17 --> 7 - 17 --> 18 + 17 --> 6 + 17 --> 19 18 -.-> 16 19 --> 18 20 --> 15 diff --git a/libs/@local/graph/migrations-macros/docs/dependency-diagram.mmd b/libs/@local/graph/migrations-macros/docs/dependency-diagram.mmd index 9612a35505c..427345a63b3 100644 --- a/libs/@local/graph/migrations-macros/docs/dependency-diagram.mmd +++ b/libs/@local/graph/migrations-macros/docs/dependency-diagram.mmd @@ -14,11 +14,26 @@ graph TD 3[hash-graph-migrations-macros] class 3 root 4[hash-graph-postgres-store] - 5[hash-graph-benches] - 6[hash-graph-integration] + 5[hashql-ast] + 6[hashql-compiletest] + 7[hashql-eval] + 8[hashql-hir] + 9[hashql-mir] + 10[hashql-syntax-jexpr] + 11[hash-graph-benches] + 12[hash-graph-integration] 0 --> 1 - 1 --> 4 + 1 --> 7 + 1 --> 10 2 --> 3 4 -.-> 2 - 5 -.-> 1 - 6 -.-> 4 + 5 -.-> 6 + 6 --> 7 + 6 --> 10 + 7 --> 4 + 7 --> 9 + 8 -.-> 6 + 9 --> 8 + 10 --> 5 + 11 -.-> 1 + 12 -.-> 4 diff --git 
a/libs/@local/graph/migrations/docs/dependency-diagram.mmd b/libs/@local/graph/migrations/docs/dependency-diagram.mmd index 242c2ffd226..e4c8194c510 100644 --- a/libs/@local/graph/migrations/docs/dependency-diagram.mmd +++ b/libs/@local/graph/migrations/docs/dependency-diagram.mmd @@ -14,15 +14,31 @@ graph TD class 2 root 3[hash-graph-migrations-macros] 4[hash-graph-postgres-store] - 5[hash-telemetry] - 6[error-stack] - 7[hash-graph-benches] - 8[hash-graph-integration] + 5[hashql-ast] + 6[hashql-compiletest] + 7[hashql-eval] + 8[hashql-hir] + 9[hashql-mir] + 10[hashql-syntax-jexpr] + 11[hash-telemetry] + 12[error-stack] + 13[hash-graph-benches] + 14[hash-graph-integration] 0 --> 1 - 1 --> 4 + 1 --> 7 + 1 --> 10 2 --> 3 - 2 --> 5 + 2 --> 11 4 -.-> 2 - 5 --> 6 - 7 -.-> 1 - 8 -.-> 4 + 5 -.-> 6 + 6 --> 7 + 6 --> 10 + 6 --> 12 + 7 --> 4 + 7 --> 9 + 8 -.-> 6 + 9 --> 8 + 10 --> 5 + 11 --> 12 + 13 -.-> 1 + 14 -.-> 4 diff --git a/libs/@local/graph/postgres-store/docs/dependency-diagram.mmd b/libs/@local/graph/postgres-store/docs/dependency-diagram.mmd index d7c861b68cb..da7204a41bb 100644 --- a/libs/@local/graph/postgres-store/docs/dependency-diagram.mmd +++ b/libs/@local/graph/postgres-store/docs/dependency-diagram.mmd @@ -24,36 +24,52 @@ graph TD 12[hash-graph-validation] 13[harpc-types] 14[harpc-wire-protocol] - 15[hash-status] - 16[hash-telemetry] - 17[hash-temporal-client] - 18[error-stack] - 19[hash-graph-benches] - 20[hash-graph-integration] - 21[hash-graph-test-data] + 15[hashql-ast] + 16[hashql-compiletest] + 17[hashql-eval] + 18[hashql-hir] + 19[hashql-mir] + 20[hashql-syntax-jexpr] + 21[hash-status] + 22[hash-telemetry] + 23[hash-temporal-client] + 24[error-stack] + 25[hash-graph-benches] + 26[hash-graph-integration] + 27[hash-graph-test-data] 0 --> 4 1 --> 10 - 1 -.-> 21 + 1 -.-> 27 2 -.-> 3 2 --> 14 - 4 --> 8 + 4 --> 17 + 4 --> 20 5 --> 1 6 --> 7 - 6 --> 16 + 6 --> 22 8 -.-> 6 8 --> 12 - 8 --> 15 + 8 --> 21 9 --> 5 9 --> 11 - 9 --> 17 + 9 --> 23 10 --> 2 - 11 
-.-> 21 - 12 -.-> 21 + 11 -.-> 27 + 12 -.-> 27 14 -.-> 13 14 --> 13 - 14 --> 18 - 16 --> 18 - 17 --> 1 - 19 -.-> 4 - 20 -.-> 8 - 21 --> 9 + 14 --> 24 + 15 -.-> 16 + 16 --> 17 + 16 --> 20 + 16 --> 24 + 17 --> 8 + 17 --> 19 + 18 -.-> 16 + 19 --> 18 + 20 --> 15 + 22 --> 24 + 23 --> 1 + 25 -.-> 4 + 26 -.-> 8 + 27 --> 9 diff --git a/libs/@local/graph/postgres-store/src/store/postgres/query/expression/conditional.rs b/libs/@local/graph/postgres-store/src/store/postgres/query/expression/conditional.rs index 38b7cdf10e4..5845722d49d 100644 --- a/libs/@local/graph/postgres-store/src/store/postgres/query/expression/conditional.rs +++ b/libs/@local/graph/postgres-store/src/store/postgres/query/expression/conditional.rs @@ -208,6 +208,7 @@ pub enum PostgresType { Numeric, Int, BigInt, + Boolean, } impl Transpile for PostgresType { @@ -225,6 +226,7 @@ impl Transpile for PostgresType { Self::Numeric => fmt.write_str("numeric"), Self::Int => fmt.write_str("int"), Self::BigInt => fmt.write_str("bigint"), + Self::Boolean => fmt.write_str("boolean"), } } } diff --git a/libs/@local/graph/store/docs/dependency-diagram.mmd b/libs/@local/graph/store/docs/dependency-diagram.mmd index 9b9535b348f..9befe6246fe 100644 --- a/libs/@local/graph/store/docs/dependency-diagram.mmd +++ b/libs/@local/graph/store/docs/dependency-diagram.mmd @@ -40,7 +40,6 @@ graph TD 1 -.-> 25 2 -.-> 3 2 --> 14 - 4 --> 6 4 --> 9 4 --> 12 4 --> 17 @@ -61,11 +60,10 @@ graph TD 14 --> 22 15 -.-> 16 16 --> 17 - 16 --> 19 16 --> 20 16 --> 22 - 17 --> 7 - 17 --> 18 + 17 --> 6 + 17 --> 19 18 -.-> 16 19 --> 18 20 --> 15 diff --git a/libs/@local/graph/temporal-versioning/docs/dependency-diagram.mmd b/libs/@local/graph/temporal-versioning/docs/dependency-diagram.mmd index 9876628820a..4a62d2f471c 100644 --- a/libs/@local/graph/temporal-versioning/docs/dependency-diagram.mmd +++ b/libs/@local/graph/temporal-versioning/docs/dependency-diagram.mmd @@ -40,7 +40,6 @@ graph TD 1 -.-> 25 2 -.-> 3 2 --> 14 - 4 --> 6 4 --> 9 4 --> 12 
4 --> 17 @@ -61,11 +60,10 @@ graph TD 14 --> 22 15 -.-> 16 16 --> 17 - 16 --> 19 16 --> 20 16 --> 22 - 17 --> 7 - 17 --> 18 + 17 --> 6 + 17 --> 19 18 -.-> 16 19 --> 18 20 --> 15 diff --git a/libs/@local/graph/test-server/docs/dependency-diagram.mmd b/libs/@local/graph/test-server/docs/dependency-diagram.mmd index 87719d33cc2..713da84c791 100644 --- a/libs/@local/graph/test-server/docs/dependency-diagram.mmd +++ b/libs/@local/graph/test-server/docs/dependency-diagram.mmd @@ -56,7 +56,6 @@ graph TD 1 -.-> 41 2 -.-> 3 2 --> 23 - 4 --> 8 4 --> 12 4 --> 13 4 --> 19 @@ -93,15 +92,14 @@ graph TD 23 --> 40 24 -.-> 25 25 --> 28 - 25 --> 31 25 --> 32 25 --> 40 26 --> 2 26 --> 27 26 --> 30 26 -.-> 37 - 28 --> 9 - 28 --> 29 + 28 --> 8 + 28 --> 31 29 -.-> 25 31 --> 29 32 --> 24 diff --git a/libs/@local/graph/types/docs/dependency-diagram.mmd b/libs/@local/graph/types/docs/dependency-diagram.mmd index 9c9b23b28eb..1ad837b1d54 100644 --- a/libs/@local/graph/types/docs/dependency-diagram.mmd +++ b/libs/@local/graph/types/docs/dependency-diagram.mmd @@ -40,7 +40,6 @@ graph TD 1 -.-> 25 2 -.-> 3 2 --> 14 - 4 --> 6 4 --> 9 4 --> 12 4 --> 17 @@ -61,11 +60,10 @@ graph TD 14 --> 22 15 -.-> 16 16 --> 17 - 16 --> 19 16 --> 20 16 --> 22 - 17 --> 7 - 17 --> 18 + 17 --> 6 + 17 --> 19 18 -.-> 16 19 --> 18 20 --> 15 diff --git a/libs/@local/graph/validation/docs/dependency-diagram.mmd b/libs/@local/graph/validation/docs/dependency-diagram.mmd index e0c53cf7b83..a7ea48b3400 100644 --- a/libs/@local/graph/validation/docs/dependency-diagram.mmd +++ b/libs/@local/graph/validation/docs/dependency-diagram.mmd @@ -22,29 +22,45 @@ graph TD class 10 root 11[harpc-types] 12[harpc-wire-protocol] - 13[hash-temporal-client] - 14[error-stack] - 15[hash-graph-benches] - 16[hash-graph-integration] - 17[hash-graph-test-data] + 13[hashql-ast] + 14[hashql-compiletest] + 15[hashql-eval] + 16[hashql-hir] + 17[hashql-mir] + 18[hashql-syntax-jexpr] + 19[hash-temporal-client] + 20[error-stack] + 
21[hash-graph-benches] + 22[hash-graph-integration] + 23[hash-graph-test-data] 0 --> 4 1 --> 8 - 1 -.-> 17 + 1 -.-> 23 2 -.-> 3 2 --> 12 - 4 --> 6 + 4 --> 15 + 4 --> 18 5 --> 1 6 --> 10 7 --> 5 7 --> 9 - 7 --> 13 + 7 --> 19 8 --> 2 - 9 -.-> 17 - 10 -.-> 17 + 9 -.-> 23 + 10 -.-> 23 12 -.-> 11 12 --> 11 - 12 --> 14 - 13 --> 1 - 15 -.-> 4 - 16 -.-> 6 - 17 --> 7 + 12 --> 20 + 13 -.-> 14 + 14 --> 15 + 14 --> 18 + 14 --> 20 + 15 --> 6 + 15 --> 17 + 16 -.-> 14 + 17 --> 16 + 18 --> 13 + 19 --> 1 + 21 -.-> 4 + 22 -.-> 6 + 23 --> 7 diff --git a/libs/@local/harpc/types/docs/dependency-diagram.mmd b/libs/@local/harpc/types/docs/dependency-diagram.mmd index b576dfff075..a33077303be 100644 --- a/libs/@local/harpc/types/docs/dependency-diagram.mmd +++ b/libs/@local/harpc/types/docs/dependency-diagram.mmd @@ -43,7 +43,6 @@ graph TD 1 --> 7 1 -.-> 29 2 --> 18 - 3 --> 5 3 --> 8 3 --> 14 3 --> 22 @@ -71,11 +70,10 @@ graph TD 18 --> 17 19 -.-> 20 20 --> 22 - 20 --> 24 20 --> 25 21 --> 2 - 22 --> 6 - 22 --> 23 + 22 --> 5 + 22 --> 24 23 -.-> 20 24 --> 23 25 --> 19 diff --git a/libs/@local/harpc/wire-protocol/docs/dependency-diagram.mmd b/libs/@local/harpc/wire-protocol/docs/dependency-diagram.mmd index 84f5eafd4fb..eb86da026a0 100644 --- a/libs/@local/harpc/wire-protocol/docs/dependency-diagram.mmd +++ b/libs/@local/harpc/wire-protocol/docs/dependency-diagram.mmd @@ -43,7 +43,6 @@ graph TD 1 --> 7 1 -.-> 29 2 --> 17 - 3 --> 5 3 --> 8 3 --> 13 3 --> 21 @@ -69,12 +68,11 @@ graph TD 17 --> 26 18 -.-> 19 19 --> 21 - 19 --> 23 19 --> 24 19 --> 26 20 --> 2 - 21 --> 6 - 21 --> 22 + 21 --> 5 + 21 --> 23 22 -.-> 19 23 --> 22 24 --> 18 diff --git a/libs/@local/hashql/ast/docs/dependency-diagram.mmd b/libs/@local/hashql/ast/docs/dependency-diagram.mmd index 507aad89755..8ea54320abf 100644 --- a/libs/@local/hashql/ast/docs/dependency-diagram.mmd +++ b/libs/@local/hashql/ast/docs/dependency-diagram.mmd @@ -14,63 +14,75 @@ graph TD 3[hash-codegen] 4[hash-graph-api] 5[hash-graph-authorization] - 
6[hash-graph-store] - 7[hash-graph-temporal-versioning] - 8[hash-graph-types] - 9[harpc-types] - 10[harpc-wire-protocol] - 11[hashql-ast] - class 11 root - 12[hashql-compiletest] - 13[hashql-core] - 14[hashql-diagnostics] - 15[hashql-eval] - 16[hashql-hir] - 17[hashql-macros] - 18[hashql-mir] - 19[hashql-syntax-jexpr] - 20[hash-temporal-client] - 21[darwin-kperf] - 22[darwin-kperf-criterion] - 23[darwin-kperf-events] - 24[darwin-kperf-sys] - 25[error-stack] - 26[hash-graph-benches] - 27[hash-graph-test-data] + 6[hash-graph-migrations] + 7[hash-graph-migrations-macros] + 8[hash-graph-postgres-store] + 9[hash-graph-store] + 10[hash-graph-temporal-versioning] + 11[hash-graph-types] + 12[hash-graph-validation] + 13[harpc-types] + 14[harpc-wire-protocol] + 15[hashql-ast] + class 15 root + 16[hashql-compiletest] + 17[hashql-core] + 18[hashql-diagnostics] + 19[hashql-eval] + 20[hashql-hir] + 21[hashql-macros] + 22[hashql-mir] + 23[hashql-syntax-jexpr] + 24[hash-status] + 25[hash-telemetry] + 26[hash-temporal-client] + 27[darwin-kperf] + 28[darwin-kperf-criterion] + 29[darwin-kperf-events] + 30[darwin-kperf-sys] + 31[error-stack] + 32[hash-graph-benches] + 33[hash-graph-test-data] 0 --> 4 - 1 --> 7 - 1 -.-> 27 + 1 --> 10 + 1 -.-> 33 2 -.-> 3 - 2 --> 10 - 4 --> 15 + 2 --> 14 4 --> 19 + 4 --> 23 5 --> 1 - 6 --> 5 - 6 --> 8 - 6 --> 20 - 7 --> 2 - 8 -.-> 27 - 10 -.-> 9 - 10 --> 9 - 10 --> 25 - 11 -.-> 12 - 12 --> 15 - 12 --> 18 - 12 --> 19 - 12 --> 25 - 13 --> 2 - 13 --> 14 - 13 --> 17 - 13 -.-> 22 - 15 --> 6 - 15 --> 16 - 16 -.-> 12 - 18 --> 16 - 19 --> 11 - 19 --> 13 - 20 --> 1 - 21 --> 23 - 21 --> 24 - 22 --> 21 - 26 -.-> 4 - 27 --> 6 + 6 --> 7 + 6 --> 25 + 8 -.-> 6 + 8 --> 12 + 8 --> 24 + 9 --> 5 + 9 --> 11 + 9 --> 26 + 10 --> 2 + 11 -.-> 33 + 12 -.-> 33 + 14 -.-> 13 + 14 --> 13 + 14 --> 31 + 15 -.-> 16 + 16 --> 19 + 16 --> 23 + 16 --> 31 + 17 --> 2 + 17 --> 18 + 17 --> 21 + 17 -.-> 28 + 19 --> 8 + 19 --> 22 + 20 -.-> 16 + 22 --> 20 + 23 --> 15 + 23 --> 17 + 25 --> 31 + 26 
--> 1 + 27 --> 29 + 27 --> 30 + 28 --> 27 + 32 -.-> 4 + 33 --> 9 diff --git a/libs/@local/hashql/compiletest/docs/dependency-diagram.mmd b/libs/@local/hashql/compiletest/docs/dependency-diagram.mmd index 4bb0c89cd24..f589b3979ef 100644 --- a/libs/@local/hashql/compiletest/docs/dependency-diagram.mmd +++ b/libs/@local/hashql/compiletest/docs/dependency-diagram.mmd @@ -14,63 +14,75 @@ graph TD 3[hash-codegen] 4[hash-graph-api] 5[hash-graph-authorization] - 6[hash-graph-store] - 7[hash-graph-temporal-versioning] - 8[hash-graph-types] - 9[harpc-types] - 10[harpc-wire-protocol] - 11[hashql-ast] - 12[hashql-compiletest] - class 12 root - 13[hashql-core] - 14[hashql-diagnostics] - 15[hashql-eval] - 16[hashql-hir] - 17[hashql-macros] - 18[hashql-mir] - 19[hashql-syntax-jexpr] - 20[hash-temporal-client] - 21[darwin-kperf] - 22[darwin-kperf-criterion] - 23[darwin-kperf-events] - 24[darwin-kperf-sys] - 25[error-stack] - 26[hash-graph-benches] - 27[hash-graph-test-data] + 6[hash-graph-migrations] + 7[hash-graph-migrations-macros] + 8[hash-graph-postgres-store] + 9[hash-graph-store] + 10[hash-graph-temporal-versioning] + 11[hash-graph-types] + 12[hash-graph-validation] + 13[harpc-types] + 14[harpc-wire-protocol] + 15[hashql-ast] + 16[hashql-compiletest] + class 16 root + 17[hashql-core] + 18[hashql-diagnostics] + 19[hashql-eval] + 20[hashql-hir] + 21[hashql-macros] + 22[hashql-mir] + 23[hashql-syntax-jexpr] + 24[hash-status] + 25[hash-telemetry] + 26[hash-temporal-client] + 27[darwin-kperf] + 28[darwin-kperf-criterion] + 29[darwin-kperf-events] + 30[darwin-kperf-sys] + 31[error-stack] + 32[hash-graph-benches] + 33[hash-graph-test-data] 0 --> 4 - 1 --> 7 - 1 -.-> 27 + 1 --> 10 + 1 -.-> 33 2 -.-> 3 - 2 --> 10 - 4 --> 15 + 2 --> 14 4 --> 19 + 4 --> 23 5 --> 1 - 6 --> 5 - 6 --> 8 - 6 --> 20 - 7 --> 2 - 8 -.-> 27 - 10 -.-> 9 - 10 --> 9 - 10 --> 25 - 11 -.-> 12 - 12 --> 15 - 12 --> 18 - 12 --> 19 - 12 --> 25 - 13 --> 2 - 13 --> 14 - 13 --> 17 - 13 -.-> 22 - 15 --> 6 - 15 --> 16 - 16 
-.-> 12 - 18 --> 16 - 19 --> 11 - 19 --> 13 - 20 --> 1 - 21 --> 23 - 21 --> 24 - 22 --> 21 - 26 -.-> 4 - 27 --> 6 + 6 --> 7 + 6 --> 25 + 8 -.-> 6 + 8 --> 12 + 8 --> 24 + 9 --> 5 + 9 --> 11 + 9 --> 26 + 10 --> 2 + 11 -.-> 33 + 12 -.-> 33 + 14 -.-> 13 + 14 --> 13 + 14 --> 31 + 15 -.-> 16 + 16 --> 19 + 16 --> 23 + 16 --> 31 + 17 --> 2 + 17 --> 18 + 17 --> 21 + 17 -.-> 28 + 19 --> 8 + 19 --> 22 + 20 -.-> 16 + 22 --> 20 + 23 --> 15 + 23 --> 17 + 25 --> 31 + 26 --> 1 + 27 --> 29 + 27 --> 30 + 28 --> 27 + 32 -.-> 4 + 33 --> 9 diff --git a/libs/@local/hashql/core/docs/dependency-diagram.mmd b/libs/@local/hashql/core/docs/dependency-diagram.mmd index 278981b1d50..c66e8d75920 100644 --- a/libs/@local/hashql/core/docs/dependency-diagram.mmd +++ b/libs/@local/hashql/core/docs/dependency-diagram.mmd @@ -40,14 +40,13 @@ graph TD 5 --> 19 6 -.-> 7 7 --> 10 - 7 --> 13 7 --> 14 7 --> 19 8 --> 1 8 --> 9 8 --> 12 8 -.-> 16 - 10 --> 11 + 10 --> 13 11 -.-> 7 13 --> 11 14 --> 6 diff --git a/libs/@local/hashql/diagnostics/docs/dependency-diagram.mmd b/libs/@local/hashql/diagnostics/docs/dependency-diagram.mmd index fbbc6245898..c586b711b54 100644 --- a/libs/@local/hashql/diagnostics/docs/dependency-diagram.mmd +++ b/libs/@local/hashql/diagnostics/docs/dependency-diagram.mmd @@ -25,10 +25,9 @@ graph TD 1 --> 9 2 -.-> 3 3 --> 6 - 3 --> 8 3 --> 9 4 --> 5 - 6 --> 7 + 6 --> 8 7 -.-> 3 8 --> 7 9 --> 2 diff --git a/libs/@local/hashql/eval/docs/dependency-diagram.mmd b/libs/@local/hashql/eval/docs/dependency-diagram.mmd index dca63d1fc2c..cfc298ac746 100644 --- a/libs/@local/hashql/eval/docs/dependency-diagram.mmd +++ b/libs/@local/hashql/eval/docs/dependency-diagram.mmd @@ -14,63 +14,75 @@ graph TD 3[hash-codegen] 4[hash-graph-api] 5[hash-graph-authorization] - 6[hash-graph-store] - 7[hash-graph-temporal-versioning] - 8[hash-graph-types] - 9[harpc-types] - 10[harpc-wire-protocol] - 11[hashql-ast] - 12[hashql-compiletest] - 13[hashql-core] - 14[hashql-diagnostics] - 15[hashql-eval] - class 15 
root - 16[hashql-hir] - 17[hashql-macros] - 18[hashql-mir] - 19[hashql-syntax-jexpr] - 20[hash-temporal-client] - 21[darwin-kperf] - 22[darwin-kperf-criterion] - 23[darwin-kperf-events] - 24[darwin-kperf-sys] - 25[error-stack] - 26[hash-graph-benches] - 27[hash-graph-test-data] + 6[hash-graph-migrations] + 7[hash-graph-migrations-macros] + 8[hash-graph-postgres-store] + 9[hash-graph-store] + 10[hash-graph-temporal-versioning] + 11[hash-graph-types] + 12[hash-graph-validation] + 13[harpc-types] + 14[harpc-wire-protocol] + 15[hashql-ast] + 16[hashql-compiletest] + 17[hashql-core] + 18[hashql-diagnostics] + 19[hashql-eval] + class 19 root + 20[hashql-hir] + 21[hashql-macros] + 22[hashql-mir] + 23[hashql-syntax-jexpr] + 24[hash-status] + 25[hash-telemetry] + 26[hash-temporal-client] + 27[darwin-kperf] + 28[darwin-kperf-criterion] + 29[darwin-kperf-events] + 30[darwin-kperf-sys] + 31[error-stack] + 32[hash-graph-benches] + 33[hash-graph-test-data] 0 --> 4 - 1 --> 7 - 1 -.-> 27 + 1 --> 10 + 1 -.-> 33 2 -.-> 3 - 2 --> 10 - 4 --> 15 + 2 --> 14 4 --> 19 + 4 --> 23 5 --> 1 - 6 --> 5 - 6 --> 8 - 6 --> 20 - 7 --> 2 - 8 -.-> 27 - 10 -.-> 9 - 10 --> 9 - 10 --> 25 - 11 -.-> 12 - 12 --> 15 - 12 --> 18 - 12 --> 19 - 12 --> 25 - 13 --> 2 - 13 --> 14 - 13 --> 17 - 13 -.-> 22 - 15 --> 6 - 15 --> 16 - 16 -.-> 12 - 18 --> 16 - 19 --> 11 - 19 --> 13 - 20 --> 1 - 21 --> 23 - 21 --> 24 - 22 --> 21 - 26 -.-> 4 - 27 --> 6 + 6 --> 7 + 6 --> 25 + 8 -.-> 6 + 8 --> 12 + 8 --> 24 + 9 --> 5 + 9 --> 11 + 9 --> 26 + 10 --> 2 + 11 -.-> 33 + 12 -.-> 33 + 14 -.-> 13 + 14 --> 13 + 14 --> 31 + 15 -.-> 16 + 16 --> 19 + 16 --> 23 + 16 --> 31 + 17 --> 2 + 17 --> 18 + 17 --> 21 + 17 -.-> 28 + 19 --> 8 + 19 --> 22 + 20 -.-> 16 + 22 --> 20 + 23 --> 15 + 23 --> 17 + 25 --> 31 + 26 --> 1 + 27 --> 29 + 27 --> 30 + 28 --> 27 + 32 -.-> 4 + 33 --> 9 diff --git a/libs/@local/hashql/eval/package.json b/libs/@local/hashql/eval/package.json index 9ae0e374bdc..d08b21e799e 100644 --- a/libs/@local/hashql/eval/package.json 
+++ b/libs/@local/hashql/eval/package.json @@ -11,9 +11,11 @@ }, "dependencies": { "@blockprotocol/type-system-rs": "workspace:*", + "@rust/hash-graph-postgres-store": "workspace:*", "@rust/hash-graph-store": "workspace:*", "@rust/hashql-core": "workspace:*", "@rust/hashql-diagnostics": "workspace:*", - "@rust/hashql-hir": "workspace:*" + "@rust/hashql-hir": "workspace:*", + "@rust/hashql-mir": "workspace:*" } } diff --git a/libs/@local/hashql/eval/src/postgres/filter/mod.rs b/libs/@local/hashql/eval/src/postgres/filter/mod.rs index 2486472cdf6..285dc0dafd4 100644 --- a/libs/@local/hashql/eval/src/postgres/filter/mod.rs +++ b/libs/@local/hashql/eval/src/postgres/filter/mod.rs @@ -95,7 +95,12 @@ impl From for Expression { // (filter, block, locals, values) let row = match continuation { Continuation::Return { filter } => { - vec![filter, null.clone(), null.clone(), null] + vec![ + filter.grouped().cast(PostgresType::Boolean), + null.clone(), + null.clone(), + null, + ] } Continuation::IslandExit { block, diff --git a/libs/@local/hashql/eval/src/postgres/filter/tests.rs b/libs/@local/hashql/eval/src/postgres/filter/tests.rs index 3d0f1ffad01..41035645467 100644 --- a/libs/@local/hashql/eval/src/postgres/filter/tests.rs +++ b/libs/@local/hashql/eval/src/postgres/filter/tests.rs @@ -196,7 +196,7 @@ fn find_entry_block( return block; } } - BasicBlockId::START + unreachable!("The postgres island always has an entry block (BasicBlockId::START)") } struct QueryReport { sql: String, diff --git a/libs/@local/hashql/eval/src/postgres/mod.rs b/libs/@local/hashql/eval/src/postgres/mod.rs index 717c1fa1368..8fcc1c1d05f 100644 --- a/libs/@local/hashql/eval/src/postgres/mod.rs +++ b/libs/@local/hashql/eval/src/postgres/mod.rs @@ -360,6 +360,15 @@ impl<'eval, 'ctx, 'heap, A: Allocator, S: BumpAllocator> // Build FROM: base table + joins + CROSS JOIN LATERALs let from = db.projections.build_from(&mut db.parameters, db.laterals); + // Ensure there's at least one select expression - 
PostgreSQL requires a non-empty select + // list + if select_expressions.is_empty() { + select_expressions.push(SelectExpression::Expression { + expression: Expression::Constant(query::Constant::U32(1)), + alias: Some(Identifier::from("placeholder")), + }); + } + let query = SelectStatement::builder() .selects(select_expressions) .from(from) diff --git a/libs/@local/hashql/eval/src/postgres/traverse.rs b/libs/@local/hashql/eval/src/postgres/traverse.rs index 0413897bf8e..460b0af6235 100644 --- a/libs/@local/hashql/eval/src/postgres/traverse.rs +++ b/libs/@local/hashql/eval/src/postgres/traverse.rs @@ -29,7 +29,10 @@ pub(crate) fn eval_entity_path( correlation: Some(db.projections.entity_editions()), name: Column::EntityEditions(table::EntityEditions::Properties).into(), }), - EntityPath::Vectors => unreachable!("embeddings are not supported in postgres"), + EntityPath::Vectors => unreachable!( + "entity vectors should never reach postgres compilation; the placement pass should \ + have rejected this" + ), EntityPath::RecordId => Expression::Function(query::Function::JsonBuildObject(vec![ ( db.parameters.symbol(sym::entity_id).into(), diff --git a/libs/@local/hashql/eval/tests/ui/postgres/comparison-no-cast.stdout b/libs/@local/hashql/eval/tests/ui/postgres/comparison-no-cast.stdout index 6550385338f..3a0a5727ac9 100644 --- a/libs/@local/hashql/eval/tests/ui/postgres/comparison-no-cast.stdout +++ b/libs/@local/hashql/eval/tests/ui/postgres/comparison-no-cast.stdout @@ -2,7 +2,7 @@ SELECT ("continuation_4_0"."row")."block" AS "continuation_4_0_block", ("continuation_4_0"."row")."locals" AS "continuation_4_0_locals", ("continuation_4_0"."row")."values" AS "continuation_4_0_values" FROM "entity_temporal_metadata" AS "entity_temporal_metadata_0_0_0" -CROSS JOIN LATERAL (SELECT (ROW($2 > $3, NULL, NULL, NULL)::continuation) AS "row" +CROSS JOIN LATERAL (SELECT (ROW((($2 > $3)::boolean), NULL, NULL, NULL)::continuation) AS "row" OFFSET 0) AS "continuation_4_0" WHERE 
"entity_temporal_metadata_0_0_0"."transaction_time" && $0 AND "entity_temporal_metadata_0_0_0"."decision_time" && $1 AND ("continuation_4_0"."row")."filter" IS NOT FALSE diff --git a/libs/@local/hashql/eval/tests/ui/postgres/constant-true-filter.stdout b/libs/@local/hashql/eval/tests/ui/postgres/constant-true-filter.stdout index e6a63637097..c8ba866a07c 100644 --- a/libs/@local/hashql/eval/tests/ui/postgres/constant-true-filter.stdout +++ b/libs/@local/hashql/eval/tests/ui/postgres/constant-true-filter.stdout @@ -1,6 +1,6 @@ ════ SQL ═══════════════════════════════════════════════════════════════════════ -SELECT +SELECT 1 AS "placeholder" FROM "entity_temporal_metadata" AS "entity_temporal_metadata_0_0_0" WHERE "entity_temporal_metadata_0_0_0"."transaction_time" && $0 AND "entity_temporal_metadata_0_0_0"."decision_time" && $1 diff --git a/libs/@local/hashql/eval/tests/ui/postgres/dict-construction.stdout b/libs/@local/hashql/eval/tests/ui/postgres/dict-construction.stdout index e491f375159..fbb362dcc7a 100644 --- a/libs/@local/hashql/eval/tests/ui/postgres/dict-construction.stdout +++ b/libs/@local/hashql/eval/tests/ui/postgres/dict-construction.stdout @@ -2,7 +2,7 @@ SELECT ("continuation_4_0"."row")."block" AS "continuation_4_0_block", ("continuation_4_0"."row")."locals" AS "continuation_4_0_locals", ("continuation_4_0"."row")."values" AS "continuation_4_0_values" FROM "entity_temporal_metadata" AS "entity_temporal_metadata_0_0_0" -CROSS JOIN LATERAL (SELECT (ROW(jsonb_build_object("entity_temporal_metadata_0_0_0"."entity_uuid", "entity_temporal_metadata_0_0_0"."web_id") = jsonb_build_object($2, $3), NULL, NULL, NULL)::continuation) AS "row" +CROSS JOIN LATERAL (SELECT (ROW(((jsonb_build_object("entity_temporal_metadata_0_0_0"."entity_uuid", "entity_temporal_metadata_0_0_0"."web_id") = jsonb_build_object($2, $3))::boolean), NULL, NULL, NULL)::continuation) AS "row" OFFSET 0) AS "continuation_4_0" WHERE "entity_temporal_metadata_0_0_0"."transaction_time" && $0 AND 
"entity_temporal_metadata_0_0_0"."decision_time" && $1 AND ("continuation_4_0"."row")."filter" IS NOT FALSE diff --git a/libs/@local/hashql/eval/tests/ui/postgres/entity-archived-check.stdout b/libs/@local/hashql/eval/tests/ui/postgres/entity-archived-check.stdout index 957721b893b..e2e71700f42 100644 --- a/libs/@local/hashql/eval/tests/ui/postgres/entity-archived-check.stdout +++ b/libs/@local/hashql/eval/tests/ui/postgres/entity-archived-check.stdout @@ -4,7 +4,7 @@ SELECT ("continuation_1_0"."row")."block" AS "continuation_1_0_block", ("continu FROM "entity_temporal_metadata" AS "entity_temporal_metadata_0_0_0" INNER JOIN "entity_editions" AS "entity_editions_0_0_1" ON "entity_editions_0_0_1"."entity_edition_id" = "entity_temporal_metadata_0_0_0"."entity_edition_id" -CROSS JOIN LATERAL (SELECT (ROW(NOT("entity_editions_0_0_1"."archived"), NULL, NULL, NULL)::continuation) AS "row" +CROSS JOIN LATERAL (SELECT (ROW(((NOT("entity_editions_0_0_1"."archived"))::boolean), NULL, NULL, NULL)::continuation) AS "row" OFFSET 0) AS "continuation_1_0" WHERE "entity_temporal_metadata_0_0_0"."transaction_time" && $0 AND "entity_temporal_metadata_0_0_0"."decision_time" && $1 AND ("continuation_1_0"."row")."filter" IS NOT FALSE diff --git a/libs/@local/hashql/eval/tests/ui/postgres/entity-type-ids-lateral.stdout b/libs/@local/hashql/eval/tests/ui/postgres/entity-type-ids-lateral.stdout index 96bf4b6b865..b417cb5ee5a 100644 --- a/libs/@local/hashql/eval/tests/ui/postgres/entity-type-ids-lateral.stdout +++ b/libs/@local/hashql/eval/tests/ui/postgres/entity-type-ids-lateral.stdout @@ -7,7 +7,7 @@ FROM "entity_is_of_type_ids" AS "eit" CROSS JOIN UNNEST("eit"."base_urls", "eit"."versions") AS "u"("b", "v") WHERE "eit"."entity_edition_id" = "entity_temporal_metadata_0_0_0"."entity_edition_id") AS "entity_is_of_type_ids_0_0_1" ON TRUE -CROSS JOIN LATERAL (SELECT (ROW("entity_is_of_type_ids_0_0_1"."entity_type_ids" = $2, NULL, NULL, NULL)::continuation) AS "row" +CROSS JOIN LATERAL 
(SELECT (ROW((("entity_is_of_type_ids_0_0_1"."entity_type_ids" = $2)::boolean), NULL, NULL, NULL)::continuation) AS "row" OFFSET 0) AS "continuation_2_0" WHERE "entity_temporal_metadata_0_0_0"."transaction_time" && $0 AND "entity_temporal_metadata_0_0_0"."decision_time" && $1 AND ("continuation_2_0"."row")."filter" IS NOT FALSE diff --git a/libs/@local/hashql/eval/tests/ui/postgres/entity-uuid-equality.stdout b/libs/@local/hashql/eval/tests/ui/postgres/entity-uuid-equality.stdout index 56444169f78..f6063f92ac0 100644 --- a/libs/@local/hashql/eval/tests/ui/postgres/entity-uuid-equality.stdout +++ b/libs/@local/hashql/eval/tests/ui/postgres/entity-uuid-equality.stdout @@ -2,7 +2,7 @@ SELECT ("continuation_7_0"."row")."block" AS "continuation_7_0_block", ("continuation_7_0"."row")."locals" AS "continuation_7_0_locals", ("continuation_7_0"."row")."values" AS "continuation_7_0_values" FROM "entity_temporal_metadata" AS "entity_temporal_metadata_0_0_0" -CROSS JOIN LATERAL (SELECT (ROW("entity_temporal_metadata_0_0_0"."entity_uuid" = $2, NULL, NULL, NULL)::continuation) AS "row" +CROSS JOIN LATERAL (SELECT (ROW((("entity_temporal_metadata_0_0_0"."entity_uuid" = $2)::boolean), NULL, NULL, NULL)::continuation) AS "row" OFFSET 0) AS "continuation_7_0" WHERE "entity_temporal_metadata_0_0_0"."transaction_time" && $0 AND "entity_temporal_metadata_0_0_0"."decision_time" && $1 AND ("continuation_7_0"."row")."filter" IS NOT FALSE diff --git a/libs/@local/hashql/eval/tests/ui/postgres/entity-web-id-equality.stdout b/libs/@local/hashql/eval/tests/ui/postgres/entity-web-id-equality.stdout index b96be9a8563..751ecba136f 100644 --- a/libs/@local/hashql/eval/tests/ui/postgres/entity-web-id-equality.stdout +++ b/libs/@local/hashql/eval/tests/ui/postgres/entity-web-id-equality.stdout @@ -2,7 +2,7 @@ SELECT ("continuation_2_0"."row")."block" AS "continuation_2_0_block", ("continuation_2_0"."row")."locals" AS "continuation_2_0_locals", ("continuation_2_0"."row")."values" AS 
"continuation_2_0_values" FROM "entity_temporal_metadata" AS "entity_temporal_metadata_0_0_0" -CROSS JOIN LATERAL (SELECT (ROW("entity_temporal_metadata_0_0_0"."web_id" = $2, NULL, NULL, NULL)::continuation) AS "row" +CROSS JOIN LATERAL (SELECT (ROW((("entity_temporal_metadata_0_0_0"."web_id" = $2)::boolean), NULL, NULL, NULL)::continuation) AS "row" OFFSET 0) AS "continuation_2_0" WHERE "entity_temporal_metadata_0_0_0"."transaction_time" && $0 AND "entity_temporal_metadata_0_0_0"."decision_time" && $1 AND ("continuation_2_0"."row")."filter" IS NOT FALSE diff --git a/libs/@local/hashql/eval/tests/ui/postgres/env-captured-variable.stdout b/libs/@local/hashql/eval/tests/ui/postgres/env-captured-variable.stdout index 919229b1639..7b6d8fae6d6 100644 --- a/libs/@local/hashql/eval/tests/ui/postgres/env-captured-variable.stdout +++ b/libs/@local/hashql/eval/tests/ui/postgres/env-captured-variable.stdout @@ -2,7 +2,7 @@ SELECT ("continuation_0_0"."row")."block" AS "continuation_0_0_block", ("continuation_0_0"."row")."locals" AS "continuation_0_0_locals", ("continuation_0_0"."row")."values" AS "continuation_0_0_values" FROM "entity_temporal_metadata" AS "entity_temporal_metadata_0_0_0" -CROSS JOIN LATERAL (SELECT (ROW("entity_temporal_metadata_0_0_0"."entity_uuid" = $2, NULL, NULL, NULL)::continuation) AS "row" +CROSS JOIN LATERAL (SELECT (ROW((("entity_temporal_metadata_0_0_0"."entity_uuid" = $2)::boolean), NULL, NULL, NULL)::continuation) AS "row" OFFSET 0) AS "continuation_0_0" WHERE "entity_temporal_metadata_0_0_0"."transaction_time" && $0 AND "entity_temporal_metadata_0_0_0"."decision_time" && $1 AND ("continuation_0_0"."row")."filter" IS NOT FALSE diff --git a/libs/@local/hashql/eval/tests/ui/postgres/filter/binary_bitand_bigint_cast.snap b/libs/@local/hashql/eval/tests/ui/postgres/filter/binary_bitand_bigint_cast.snap index 1c20e927e11..782f00a9812 100644 --- a/libs/@local/hashql/eval/tests/ui/postgres/filter/binary_bitand_bigint_cast.snap +++ 
b/libs/@local/hashql/eval/tests/ui/postgres/filter/binary_bitand_bigint_cast.snap @@ -4,4 +4,4 @@ expression: report.to_string() --- ==================== Island (entry: bb0, target: postgres) ===================== -(ROW((($0)::bigint) & (($1)::bigint), NULL, NULL, NULL)::continuation) +(ROW((((($0)::bigint) & (($1)::bigint))::boolean), NULL, NULL, NULL)::continuation) diff --git a/libs/@local/hashql/eval/tests/ui/postgres/filter/binary_sub_numeric_cast.snap b/libs/@local/hashql/eval/tests/ui/postgres/filter/binary_sub_numeric_cast.snap index 39c16eff221..a100edd1cf0 100644 --- a/libs/@local/hashql/eval/tests/ui/postgres/filter/binary_sub_numeric_cast.snap +++ b/libs/@local/hashql/eval/tests/ui/postgres/filter/binary_sub_numeric_cast.snap @@ -4,4 +4,4 @@ expression: report.to_string() --- ==================== Island (entry: bb0, target: postgres) ===================== -(ROW((($0)::numeric) - (($1)::numeric), NULL, NULL, NULL)::continuation) +(ROW((((($0)::numeric) - (($1)::numeric))::boolean), NULL, NULL, NULL)::continuation) diff --git a/libs/@local/hashql/eval/tests/ui/postgres/filter/diamond_cfg_merge.snap b/libs/@local/hashql/eval/tests/ui/postgres/filter/diamond_cfg_merge.snap index 1e9105a703b..94f52bcc981 100644 --- a/libs/@local/hashql/eval/tests/ui/postgres/filter/diamond_cfg_merge.snap +++ b/libs/@local/hashql/eval/tests/ui/postgres/filter/diamond_cfg_merge.snap @@ -4,4 +4,4 @@ expression: report.to_string() --- ==================== Island (entry: bb0, target: postgres) ===================== -CASE WHEN (($0)::int) = 0 THEN (ROW(0, NULL, NULL, NULL)::continuation) WHEN (($0)::int) = 1 THEN (ROW(1, NULL, NULL, NULL)::continuation) END +CASE WHEN (($0)::int) = 0 THEN (ROW(((0)::boolean), NULL, NULL, NULL)::continuation) WHEN (($0)::int) = 1 THEN (ROW(((1)::boolean), NULL, NULL, NULL)::continuation) END diff --git a/libs/@local/hashql/eval/tests/ui/postgres/filter/dynamic_index_projection.snap 
b/libs/@local/hashql/eval/tests/ui/postgres/filter/dynamic_index_projection.snap index 090042496c2..74197698106 100644 --- a/libs/@local/hashql/eval/tests/ui/postgres/filter/dynamic_index_projection.snap +++ b/libs/@local/hashql/eval/tests/ui/postgres/filter/dynamic_index_projection.snap @@ -4,4 +4,4 @@ expression: report.to_string() --- ==================== Island (entry: bb0, target: postgres) ===================== -(ROW(jsonb_extract_path(jsonb_build_array(10, 20, 30), (($0)::text)), NULL, NULL, NULL)::continuation) +(ROW(((jsonb_extract_path(jsonb_build_array(10, 20, 30), (($0)::text)))::boolean), NULL, NULL, NULL)::continuation) diff --git a/libs/@local/hashql/eval/tests/ui/postgres/filter/field_by_name_projection.snap b/libs/@local/hashql/eval/tests/ui/postgres/filter/field_by_name_projection.snap index 86076201ec1..81ebb1c3773 100644 --- a/libs/@local/hashql/eval/tests/ui/postgres/filter/field_by_name_projection.snap +++ b/libs/@local/hashql/eval/tests/ui/postgres/filter/field_by_name_projection.snap @@ -4,4 +4,4 @@ expression: report.to_string() --- ==================== Island (entry: bb0, target: postgres) ===================== -(ROW(jsonb_extract_path(jsonb_build_object($0, 10, $1, 20), (($0)::text)), NULL, NULL, NULL)::continuation) +(ROW(((jsonb_extract_path(jsonb_build_object($0, 10, $1, 20), (($0)::text)))::boolean), NULL, NULL, NULL)::continuation) diff --git a/libs/@local/hashql/eval/tests/ui/postgres/filter/field_index_projection.snap b/libs/@local/hashql/eval/tests/ui/postgres/filter/field_index_projection.snap index 255ac39acf3..9254d49ff99 100644 --- a/libs/@local/hashql/eval/tests/ui/postgres/filter/field_index_projection.snap +++ b/libs/@local/hashql/eval/tests/ui/postgres/filter/field_index_projection.snap @@ -4,4 +4,4 @@ expression: report.to_string() --- ==================== Island (entry: bb0, target: postgres) ===================== -(ROW(jsonb_extract_path(jsonb_build_array(10, 20), ((0)::text)), NULL, NULL, NULL)::continuation) 
+(ROW(((jsonb_extract_path(jsonb_build_array(10, 20), ((0)::text)))::boolean), NULL, NULL, NULL)::continuation) diff --git a/libs/@local/hashql/eval/tests/ui/postgres/filter/island_exit_switch_int.snap b/libs/@local/hashql/eval/tests/ui/postgres/filter/island_exit_switch_int.snap index 0817b4696aa..3d7dbed0692 100644 --- a/libs/@local/hashql/eval/tests/ui/postgres/filter/island_exit_switch_int.snap +++ b/libs/@local/hashql/eval/tests/ui/postgres/filter/island_exit_switch_int.snap @@ -4,4 +4,4 @@ expression: report.to_string() --- ==================== Island (entry: bb0, target: postgres) ===================== -CASE WHEN (($6)::int) = 0 THEN (ROW(NULL, 2, ARRAY[]::int[], ARRAY[]::jsonb[])::continuation) WHEN (($6)::int) = 1 THEN (ROW(1, NULL, NULL, NULL)::continuation) END +CASE WHEN (($6)::int) = 0 THEN (ROW(NULL, 2, ARRAY[]::int[], ARRAY[]::jsonb[])::continuation) WHEN (($6)::int) = 1 THEN (ROW(((1)::boolean), NULL, NULL, NULL)::continuation) END diff --git a/libs/@local/hashql/eval/tests/ui/postgres/filter/left_entity_filter.snap b/libs/@local/hashql/eval/tests/ui/postgres/filter/left_entity_filter.snap index 0dacf67c35d..5ae77a37836 100644 --- a/libs/@local/hashql/eval/tests/ui/postgres/filter/left_entity_filter.snap +++ b/libs/@local/hashql/eval/tests/ui/postgres/filter/left_entity_filter.snap @@ -4,4 +4,4 @@ expression: report.to_string() --- ==================== Island (entry: bb0, target: postgres) ===================== -(ROW("entity_has_left_entity_0_0_1"."left_entity_uuid" = $0, NULL, NULL, NULL)::continuation) +(ROW((("entity_has_left_entity_0_0_1"."left_entity_uuid" = $0)::boolean), NULL, NULL, NULL)::continuation) diff --git a/libs/@local/hashql/eval/tests/ui/postgres/filter/nested_property_access.snap b/libs/@local/hashql/eval/tests/ui/postgres/filter/nested_property_access.snap index 26e1b17745d..7f13cbd8fe0 100644 --- a/libs/@local/hashql/eval/tests/ui/postgres/filter/nested_property_access.snap +++ 
b/libs/@local/hashql/eval/tests/ui/postgres/filter/nested_property_access.snap @@ -4,4 +4,4 @@ expression: report.to_string() --- ==================== Island (entry: bb0, target: postgres) ===================== -(ROW(jsonb_extract_path("entity_editions_0_0_1"."properties", (($0)::text), (($1)::text)) = $2, NULL, NULL, NULL)::continuation) +(ROW(((jsonb_extract_path("entity_editions_0_0_1"."properties", (($0)::text), (($1)::text)) = $2)::boolean), NULL, NULL, NULL)::continuation) diff --git a/libs/@local/hashql/eval/tests/ui/postgres/filter/property_field_equality.snap b/libs/@local/hashql/eval/tests/ui/postgres/filter/property_field_equality.snap index 1d66026777a..287afa325a8 100644 --- a/libs/@local/hashql/eval/tests/ui/postgres/filter/property_field_equality.snap +++ b/libs/@local/hashql/eval/tests/ui/postgres/filter/property_field_equality.snap @@ -4,4 +4,4 @@ expression: report.to_string() --- ==================== Island (entry: bb0, target: postgres) ===================== -(ROW(jsonb_extract_path("entity_editions_0_0_1"."properties", (($0)::text)) = $1, NULL, NULL, NULL)::continuation) +(ROW(((jsonb_extract_path("entity_editions_0_0_1"."properties", (($0)::text)) = $1)::boolean), NULL, NULL, NULL)::continuation) diff --git a/libs/@local/hashql/eval/tests/ui/postgres/filter/straight_line_goto_chain.snap b/libs/@local/hashql/eval/tests/ui/postgres/filter/straight_line_goto_chain.snap index 468494abd36..fee48eda83e 100644 --- a/libs/@local/hashql/eval/tests/ui/postgres/filter/straight_line_goto_chain.snap +++ b/libs/@local/hashql/eval/tests/ui/postgres/filter/straight_line_goto_chain.snap @@ -4,4 +4,4 @@ expression: report.to_string() --- ==================== Island (entry: bb0, target: postgres) ===================== -(ROW($0, NULL, NULL, NULL)::continuation) +(ROW((($0)::boolean), NULL, NULL, NULL)::continuation) diff --git a/libs/@local/hashql/eval/tests/ui/postgres/filter/switch_int_many_branches.snap 
b/libs/@local/hashql/eval/tests/ui/postgres/filter/switch_int_many_branches.snap index ec6c8b5269b..8444de9287a 100644 --- a/libs/@local/hashql/eval/tests/ui/postgres/filter/switch_int_many_branches.snap +++ b/libs/@local/hashql/eval/tests/ui/postgres/filter/switch_int_many_branches.snap @@ -4,4 +4,4 @@ expression: report.to_string() --- ==================== Island (entry: bb0, target: postgres) ===================== -CASE WHEN (($0)::int) = 0 THEN (ROW(1, NULL, NULL, NULL)::continuation) WHEN (($0)::int) = 1 THEN (ROW(0, NULL, NULL, NULL)::continuation) WHEN (($0)::int) = 2 THEN (ROW(1, NULL, NULL, NULL)::continuation) WHEN (($0)::int) = 3 THEN (ROW(0, NULL, NULL, NULL)::continuation) ELSE (ROW(1, NULL, NULL, NULL)::continuation) END +CASE WHEN (($0)::int) = 0 THEN (ROW(((1)::boolean), NULL, NULL, NULL)::continuation) WHEN (($0)::int) = 1 THEN (ROW(((0)::boolean), NULL, NULL, NULL)::continuation) WHEN (($0)::int) = 2 THEN (ROW(((1)::boolean), NULL, NULL, NULL)::continuation) WHEN (($0)::int) = 3 THEN (ROW(((0)::boolean), NULL, NULL, NULL)::continuation) ELSE (ROW(((1)::boolean), NULL, NULL, NULL)::continuation) END diff --git a/libs/@local/hashql/eval/tests/ui/postgres/filter/unary_bitnot.snap b/libs/@local/hashql/eval/tests/ui/postgres/filter/unary_bitnot.snap index c44b14d5fd5..e742bdb3940 100644 --- a/libs/@local/hashql/eval/tests/ui/postgres/filter/unary_bitnot.snap +++ b/libs/@local/hashql/eval/tests/ui/postgres/filter/unary_bitnot.snap @@ -4,4 +4,4 @@ expression: report.to_string() --- ==================== Island (entry: bb0, target: postgres) ===================== -(ROW(~($0), NULL, NULL, NULL)::continuation) +(ROW(((~($0))::boolean), NULL, NULL, NULL)::continuation) diff --git a/libs/@local/hashql/eval/tests/ui/postgres/filter/unary_neg.snap b/libs/@local/hashql/eval/tests/ui/postgres/filter/unary_neg.snap index abbb31c653c..abaca23ebf9 100644 --- a/libs/@local/hashql/eval/tests/ui/postgres/filter/unary_neg.snap +++ 
b/libs/@local/hashql/eval/tests/ui/postgres/filter/unary_neg.snap @@ -4,4 +4,4 @@ expression: report.to_string() --- ==================== Island (entry: bb0, target: postgres) ===================== -(ROW(-($0), NULL, NULL, NULL)::continuation) +(ROW(((-($0))::boolean), NULL, NULL, NULL)::continuation) diff --git a/libs/@local/hashql/eval/tests/ui/postgres/filter/unary_not.snap b/libs/@local/hashql/eval/tests/ui/postgres/filter/unary_not.snap index f2066fb8906..8ce8c480b00 100644 --- a/libs/@local/hashql/eval/tests/ui/postgres/filter/unary_not.snap +++ b/libs/@local/hashql/eval/tests/ui/postgres/filter/unary_not.snap @@ -4,4 +4,4 @@ expression: report.to_string() --- ==================== Island (entry: bb0, target: postgres) ===================== -(ROW(NOT($0), NULL, NULL, NULL)::continuation) +(ROW(((NOT($0))::boolean), NULL, NULL, NULL)::continuation) diff --git a/libs/@local/hashql/eval/tests/ui/postgres/if-input-branches.stdout b/libs/@local/hashql/eval/tests/ui/postgres/if-input-branches.stdout index 8e4bcee5cf2..eb21a3b7170 100644 --- a/libs/@local/hashql/eval/tests/ui/postgres/if-input-branches.stdout +++ b/libs/@local/hashql/eval/tests/ui/postgres/if-input-branches.stdout @@ -2,7 +2,7 @@ SELECT ("continuation_2_0"."row")."block" AS "continuation_2_0_block", ("continuation_2_0"."row")."locals" AS "continuation_2_0_locals", ("continuation_2_0"."row")."values" AS "continuation_2_0_values" FROM "entity_temporal_metadata" AS "entity_temporal_metadata_0_0_0" -CROSS JOIN LATERAL (SELECT CASE WHEN (($2)::int) = 0 THEN (ROW("entity_temporal_metadata_0_0_0"."entity_uuid" = $3, NULL, NULL, NULL)::continuation) WHEN (($2)::int) = 1 THEN (ROW("entity_temporal_metadata_0_0_0"."entity_uuid" = $4, NULL, NULL, NULL)::continuation) END AS "row" +CROSS JOIN LATERAL (SELECT CASE WHEN (($2)::int) = 0 THEN (ROW((("entity_temporal_metadata_0_0_0"."entity_uuid" = $3)::boolean), NULL, NULL, NULL)::continuation) WHEN (($2)::int) = 1 THEN 
(ROW((("entity_temporal_metadata_0_0_0"."entity_uuid" = $4)::boolean), NULL, NULL, NULL)::continuation) END AS "row" OFFSET 0) AS "continuation_2_0" WHERE "entity_temporal_metadata_0_0_0"."transaction_time" && $0 AND "entity_temporal_metadata_0_0_0"."decision_time" && $1 AND ("continuation_2_0"."row")."filter" IS NOT FALSE diff --git a/libs/@local/hashql/eval/tests/ui/postgres/input-parameter-exists.stdout b/libs/@local/hashql/eval/tests/ui/postgres/input-parameter-exists.stdout index baaa1eb2e02..608d4990518 100644 --- a/libs/@local/hashql/eval/tests/ui/postgres/input-parameter-exists.stdout +++ b/libs/@local/hashql/eval/tests/ui/postgres/input-parameter-exists.stdout @@ -2,7 +2,7 @@ SELECT ("continuation_3_0"."row")."block" AS "continuation_3_0_block", ("continuation_3_0"."row")."locals" AS "continuation_3_0_locals", ("continuation_3_0"."row")."values" AS "continuation_3_0_values" FROM "entity_temporal_metadata" AS "entity_temporal_metadata_0_0_0" -CROSS JOIN LATERAL (SELECT CASE WHEN (($2 IS NOT NULL)::int) = 0 THEN (ROW(1, NULL, NULL, NULL)::continuation) WHEN (($2 IS NOT NULL)::int) = 1 THEN (ROW($2, NULL, NULL, NULL)::continuation) END AS "row" +CROSS JOIN LATERAL (SELECT CASE WHEN (($2 IS NOT NULL)::int) = 0 THEN (ROW(((1)::boolean), NULL, NULL, NULL)::continuation) WHEN (($2 IS NOT NULL)::int) = 1 THEN (ROW((($2)::boolean), NULL, NULL, NULL)::continuation) END AS "row" OFFSET 0) AS "continuation_3_0" WHERE "entity_temporal_metadata_0_0_0"."transaction_time" && $0 AND "entity_temporal_metadata_0_0_0"."decision_time" && $1 AND ("continuation_3_0"."row")."filter" IS NOT FALSE diff --git a/libs/@local/hashql/eval/tests/ui/postgres/input-parameter-load.stdout b/libs/@local/hashql/eval/tests/ui/postgres/input-parameter-load.stdout index f2c08adb13e..01ab6aa78be 100644 --- a/libs/@local/hashql/eval/tests/ui/postgres/input-parameter-load.stdout +++ b/libs/@local/hashql/eval/tests/ui/postgres/input-parameter-load.stdout @@ -2,7 +2,7 @@ SELECT 
("continuation_2_0"."row")."block" AS "continuation_2_0_block", ("continuation_2_0"."row")."locals" AS "continuation_2_0_locals", ("continuation_2_0"."row")."values" AS "continuation_2_0_values" FROM "entity_temporal_metadata" AS "entity_temporal_metadata_0_0_0" -CROSS JOIN LATERAL (SELECT (ROW("entity_temporal_metadata_0_0_0"."entity_uuid" = $2, NULL, NULL, NULL)::continuation) AS "row" +CROSS JOIN LATERAL (SELECT (ROW((("entity_temporal_metadata_0_0_0"."entity_uuid" = $2)::boolean), NULL, NULL, NULL)::continuation) AS "row" OFFSET 0) AS "continuation_2_0" WHERE "entity_temporal_metadata_0_0_0"."transaction_time" && $0 AND "entity_temporal_metadata_0_0_0"."decision_time" && $1 AND ("continuation_2_0"."row")."filter" IS NOT FALSE diff --git a/libs/@local/hashql/eval/tests/ui/postgres/let-binding-propagation.stdout b/libs/@local/hashql/eval/tests/ui/postgres/let-binding-propagation.stdout index f2c08adb13e..01ab6aa78be 100644 --- a/libs/@local/hashql/eval/tests/ui/postgres/let-binding-propagation.stdout +++ b/libs/@local/hashql/eval/tests/ui/postgres/let-binding-propagation.stdout @@ -2,7 +2,7 @@ SELECT ("continuation_2_0"."row")."block" AS "continuation_2_0_block", ("continuation_2_0"."row")."locals" AS "continuation_2_0_locals", ("continuation_2_0"."row")."values" AS "continuation_2_0_values" FROM "entity_temporal_metadata" AS "entity_temporal_metadata_0_0_0" -CROSS JOIN LATERAL (SELECT (ROW("entity_temporal_metadata_0_0_0"."entity_uuid" = $2, NULL, NULL, NULL)::continuation) AS "row" +CROSS JOIN LATERAL (SELECT (ROW((("entity_temporal_metadata_0_0_0"."entity_uuid" = $2)::boolean), NULL, NULL, NULL)::continuation) AS "row" OFFSET 0) AS "continuation_2_0" WHERE "entity_temporal_metadata_0_0_0"."transaction_time" && $0 AND "entity_temporal_metadata_0_0_0"."decision_time" && $1 AND ("continuation_2_0"."row")."filter" IS NOT FALSE diff --git a/libs/@local/hashql/eval/tests/ui/postgres/list-construction.stdout 
b/libs/@local/hashql/eval/tests/ui/postgres/list-construction.stdout index 6da76e10685..3da14761d00 100644 --- a/libs/@local/hashql/eval/tests/ui/postgres/list-construction.stdout +++ b/libs/@local/hashql/eval/tests/ui/postgres/list-construction.stdout @@ -2,7 +2,7 @@ SELECT ("continuation_3_0"."row")."block" AS "continuation_3_0_block", ("continuation_3_0"."row")."locals" AS "continuation_3_0_locals", ("continuation_3_0"."row")."values" AS "continuation_3_0_values" FROM "entity_temporal_metadata" AS "entity_temporal_metadata_0_0_0" -CROSS JOIN LATERAL (SELECT (ROW(jsonb_build_array("entity_temporal_metadata_0_0_0"."entity_uuid", $2) = jsonb_build_array($3, "entity_temporal_metadata_0_0_0"."entity_uuid"), NULL, NULL, NULL)::continuation) AS "row" +CROSS JOIN LATERAL (SELECT (ROW(((jsonb_build_array("entity_temporal_metadata_0_0_0"."entity_uuid", $2) = jsonb_build_array($3, "entity_temporal_metadata_0_0_0"."entity_uuid"))::boolean), NULL, NULL, NULL)::continuation) AS "row" OFFSET 0) AS "continuation_3_0" WHERE "entity_temporal_metadata_0_0_0"."transaction_time" && $0 AND "entity_temporal_metadata_0_0_0"."decision_time" && $1 AND ("continuation_3_0"."row")."filter" IS NOT FALSE diff --git a/libs/@local/hashql/eval/tests/ui/postgres/logical-and-inputs.stdout b/libs/@local/hashql/eval/tests/ui/postgres/logical-and-inputs.stdout index d3ae0a3d6a5..861205c909e 100644 --- a/libs/@local/hashql/eval/tests/ui/postgres/logical-and-inputs.stdout +++ b/libs/@local/hashql/eval/tests/ui/postgres/logical-and-inputs.stdout @@ -2,7 +2,7 @@ SELECT ("continuation_3_0"."row")."block" AS "continuation_3_0_block", ("continuation_3_0"."row")."locals" AS "continuation_3_0_locals", ("continuation_3_0"."row")."values" AS "continuation_3_0_values" FROM "entity_temporal_metadata" AS "entity_temporal_metadata_0_0_0" -CROSS JOIN LATERAL (SELECT CASE WHEN (($2)::int) = 0 THEN (ROW(0, NULL, NULL, NULL)::continuation) WHEN (($2)::int) = 1 THEN (ROW($3, NULL, NULL, NULL)::continuation) END AS "row" 
+CROSS JOIN LATERAL (SELECT CASE WHEN (($2)::int) = 0 THEN (ROW(((0)::boolean), NULL, NULL, NULL)::continuation) WHEN (($2)::int) = 1 THEN (ROW((($3)::boolean), NULL, NULL, NULL)::continuation) END AS "row" OFFSET 0) AS "continuation_3_0" WHERE "entity_temporal_metadata_0_0_0"."transaction_time" && $0 AND "entity_temporal_metadata_0_0_0"."decision_time" && $1 AND ("continuation_3_0"."row")."filter" IS NOT FALSE diff --git a/libs/@local/hashql/eval/tests/ui/postgres/minimal-select-no-extra-joins.stdout b/libs/@local/hashql/eval/tests/ui/postgres/minimal-select-no-extra-joins.stdout index 590f4337436..3de5f5112f2 100644 --- a/libs/@local/hashql/eval/tests/ui/postgres/minimal-select-no-extra-joins.stdout +++ b/libs/@local/hashql/eval/tests/ui/postgres/minimal-select-no-extra-joins.stdout @@ -2,7 +2,7 @@ SELECT ("continuation_2_0"."row")."block" AS "continuation_2_0_block", ("continuation_2_0"."row")."locals" AS "continuation_2_0_locals", ("continuation_2_0"."row")."values" AS "continuation_2_0_values" FROM "entity_temporal_metadata" AS "entity_temporal_metadata_0_0_0" -CROSS JOIN LATERAL (SELECT (ROW("entity_temporal_metadata_0_0_0"."web_id" = $2, NULL, NULL, NULL)::continuation) AS "row" +CROSS JOIN LATERAL (SELECT (ROW((("entity_temporal_metadata_0_0_0"."web_id" = $2)::boolean), NULL, NULL, NULL)::continuation) AS "row" OFFSET 0) AS "continuation_2_0" WHERE "entity_temporal_metadata_0_0_0"."transaction_time" && $0 AND "entity_temporal_metadata_0_0_0"."decision_time" && $1 AND ("continuation_2_0"."row")."filter" IS NOT FALSE diff --git a/libs/@local/hashql/eval/tests/ui/postgres/mixed-sources-filter.stdout b/libs/@local/hashql/eval/tests/ui/postgres/mixed-sources-filter.stdout index 48f498a6cb3..2458d6a1604 100644 --- a/libs/@local/hashql/eval/tests/ui/postgres/mixed-sources-filter.stdout +++ b/libs/@local/hashql/eval/tests/ui/postgres/mixed-sources-filter.stdout @@ -4,9 +4,9 @@ SELECT ("continuation_0_0"."row")."block" AS "continuation_0_0_block", ("continu FROM 
"entity_temporal_metadata" AS "entity_temporal_metadata_0_0_0" INNER JOIN "entity_editions" AS "entity_editions_0_0_1" ON "entity_editions_0_0_1"."entity_edition_id" = "entity_temporal_metadata_0_0_0"."entity_edition_id" -CROSS JOIN LATERAL (SELECT (ROW(NOT("entity_editions_0_0_1"."archived"), NULL, NULL, NULL)::continuation) AS "row" +CROSS JOIN LATERAL (SELECT (ROW(((NOT("entity_editions_0_0_1"."archived"))::boolean), NULL, NULL, NULL)::continuation) AS "row" OFFSET 0) AS "continuation_0_0" -CROSS JOIN LATERAL (SELECT (ROW("entity_temporal_metadata_0_0_0"."entity_uuid" = $2, NULL, NULL, NULL)::continuation) AS "row" +CROSS JOIN LATERAL (SELECT (ROW((("entity_temporal_metadata_0_0_0"."entity_uuid" = $2)::boolean), NULL, NULL, NULL)::continuation) AS "row" OFFSET 0) AS "continuation_1_0" WHERE "entity_temporal_metadata_0_0_0"."transaction_time" && $0 AND "entity_temporal_metadata_0_0_0"."decision_time" && $1 AND ("continuation_0_0"."row")."filter" IS NOT FALSE AND ("continuation_1_0"."row")."filter" IS NOT FALSE diff --git a/libs/@local/hashql/eval/tests/ui/postgres/multiple-filters.stdout b/libs/@local/hashql/eval/tests/ui/postgres/multiple-filters.stdout index f952da11bca..6995c838107 100644 --- a/libs/@local/hashql/eval/tests/ui/postgres/multiple-filters.stdout +++ b/libs/@local/hashql/eval/tests/ui/postgres/multiple-filters.stdout @@ -2,9 +2,9 @@ SELECT ("continuation_3_0"."row")."block" AS "continuation_3_0_block", ("continuation_3_0"."row")."locals" AS "continuation_3_0_locals", ("continuation_3_0"."row")."values" AS "continuation_3_0_values", ("continuation_4_0"."row")."block" AS "continuation_4_0_block", ("continuation_4_0"."row")."locals" AS "continuation_4_0_locals", ("continuation_4_0"."row")."values" AS "continuation_4_0_values" FROM "entity_temporal_metadata" AS "entity_temporal_metadata_0_0_0" -CROSS JOIN LATERAL (SELECT (ROW("entity_temporal_metadata_0_0_0"."entity_uuid" = $2, NULL, NULL, NULL)::continuation) AS "row" +CROSS JOIN LATERAL (SELECT 
(ROW((("entity_temporal_metadata_0_0_0"."entity_uuid" = $2)::boolean), NULL, NULL, NULL)::continuation) AS "row" OFFSET 0) AS "continuation_3_0" -CROSS JOIN LATERAL (SELECT (ROW("entity_temporal_metadata_0_0_0"."web_id" = $3, NULL, NULL, NULL)::continuation) AS "row" +CROSS JOIN LATERAL (SELECT (ROW((("entity_temporal_metadata_0_0_0"."web_id" = $3)::boolean), NULL, NULL, NULL)::continuation) AS "row" OFFSET 0) AS "continuation_4_0" WHERE "entity_temporal_metadata_0_0_0"."transaction_time" && $0 AND "entity_temporal_metadata_0_0_0"."decision_time" && $1 AND ("continuation_3_0"."row")."filter" IS NOT FALSE AND ("continuation_4_0"."row")."filter" IS NOT FALSE diff --git a/libs/@local/hashql/eval/tests/ui/postgres/nested-if-input-branches.stdout b/libs/@local/hashql/eval/tests/ui/postgres/nested-if-input-branches.stdout index 6e27450ca62..06a006137ec 100644 --- a/libs/@local/hashql/eval/tests/ui/postgres/nested-if-input-branches.stdout +++ b/libs/@local/hashql/eval/tests/ui/postgres/nested-if-input-branches.stdout @@ -2,7 +2,7 @@ SELECT ("continuation_2_0"."row")."block" AS "continuation_2_0_block", ("continuation_2_0"."row")."locals" AS "continuation_2_0_locals", ("continuation_2_0"."row")."values" AS "continuation_2_0_values" FROM "entity_temporal_metadata" AS "entity_temporal_metadata_0_0_0" -CROSS JOIN LATERAL (SELECT CASE WHEN (($2)::int) = 0 THEN (ROW("entity_temporal_metadata_0_0_0"."entity_uuid" = $3, NULL, NULL, NULL)::continuation) WHEN (($2)::int) = 1 THEN CASE WHEN (($4)::int) = 0 THEN (ROW("entity_temporal_metadata_0_0_0"."entity_uuid" = $5, NULL, NULL, NULL)::continuation) WHEN (($4)::int) = 1 THEN (ROW("entity_temporal_metadata_0_0_0"."entity_uuid" = $6, NULL, NULL, NULL)::continuation) END END AS "row" +CROSS JOIN LATERAL (SELECT CASE WHEN (($2)::int) = 0 THEN (ROW((("entity_temporal_metadata_0_0_0"."entity_uuid" = $3)::boolean), NULL, NULL, NULL)::continuation) WHEN (($2)::int) = 1 THEN CASE WHEN (($4)::int) = 0 THEN 
(ROW((("entity_temporal_metadata_0_0_0"."entity_uuid" = $5)::boolean), NULL, NULL, NULL)::continuation) WHEN (($4)::int) = 1 THEN (ROW((("entity_temporal_metadata_0_0_0"."entity_uuid" = $6)::boolean), NULL, NULL, NULL)::continuation) END END AS "row" OFFSET 0) AS "continuation_2_0" WHERE "entity_temporal_metadata_0_0_0"."transaction_time" && $0 AND "entity_temporal_metadata_0_0_0"."decision_time" && $1 AND ("continuation_2_0"."row")."filter" IS NOT FALSE diff --git a/libs/@local/hashql/eval/tests/ui/postgres/opaque-passthrough.stdout b/libs/@local/hashql/eval/tests/ui/postgres/opaque-passthrough.stdout index aaeb8a8d408..891cf808e09 100644 --- a/libs/@local/hashql/eval/tests/ui/postgres/opaque-passthrough.stdout +++ b/libs/@local/hashql/eval/tests/ui/postgres/opaque-passthrough.stdout @@ -2,7 +2,7 @@ SELECT ("continuation_8_0"."row")."block" AS "continuation_8_0_block", ("continuation_8_0"."row")."locals" AS "continuation_8_0_locals", ("continuation_8_0"."row")."values" AS "continuation_8_0_values" FROM "entity_temporal_metadata" AS "entity_temporal_metadata_0_0_0" -CROSS JOIN LATERAL (SELECT (ROW("entity_temporal_metadata_0_0_0"."entity_uuid" = $2, NULL, NULL, NULL)::continuation) AS "row" +CROSS JOIN LATERAL (SELECT (ROW((("entity_temporal_metadata_0_0_0"."entity_uuid" = $2)::boolean), NULL, NULL, NULL)::continuation) AS "row" OFFSET 0) AS "continuation_8_0" WHERE "entity_temporal_metadata_0_0_0"."transaction_time" && $0 AND "entity_temporal_metadata_0_0_0"."decision_time" && $1 AND ("continuation_8_0"."row")."filter" IS NOT FALSE diff --git a/libs/@local/hashql/eval/tests/ui/postgres/struct-construction.stdout b/libs/@local/hashql/eval/tests/ui/postgres/struct-construction.stdout index 95f2814ac7a..5ac47c9b6af 100644 --- a/libs/@local/hashql/eval/tests/ui/postgres/struct-construction.stdout +++ b/libs/@local/hashql/eval/tests/ui/postgres/struct-construction.stdout @@ -2,7 +2,7 @@ SELECT ("continuation_4_0"."row")."block" AS "continuation_4_0_block", 
("continuation_4_0"."row")."locals" AS "continuation_4_0_locals", ("continuation_4_0"."row")."values" AS "continuation_4_0_values" FROM "entity_temporal_metadata" AS "entity_temporal_metadata_0_0_0" -CROSS JOIN LATERAL (SELECT (ROW(jsonb_build_object($2, "entity_temporal_metadata_0_0_0"."entity_uuid", $3, "entity_temporal_metadata_0_0_0"."web_id") = jsonb_build_object($2, $4, $3, $5), NULL, NULL, NULL)::continuation) AS "row" +CROSS JOIN LATERAL (SELECT (ROW(((jsonb_build_object($2, "entity_temporal_metadata_0_0_0"."entity_uuid", $3, "entity_temporal_metadata_0_0_0"."web_id") = jsonb_build_object($2, $4, $3, $5))::boolean), NULL, NULL, NULL)::continuation) AS "row" OFFSET 0) AS "continuation_4_0" WHERE "entity_temporal_metadata_0_0_0"."transaction_time" && $0 AND "entity_temporal_metadata_0_0_0"."decision_time" && $1 AND ("continuation_4_0"."row")."filter" IS NOT FALSE diff --git a/libs/@local/hashql/eval/tests/ui/postgres/tuple-construction.stdout b/libs/@local/hashql/eval/tests/ui/postgres/tuple-construction.stdout index 99c19319d40..b0f5d2884db 100644 --- a/libs/@local/hashql/eval/tests/ui/postgres/tuple-construction.stdout +++ b/libs/@local/hashql/eval/tests/ui/postgres/tuple-construction.stdout @@ -2,7 +2,7 @@ SELECT ("continuation_4_0"."row")."block" AS "continuation_4_0_block", ("continuation_4_0"."row")."locals" AS "continuation_4_0_locals", ("continuation_4_0"."row")."values" AS "continuation_4_0_values" FROM "entity_temporal_metadata" AS "entity_temporal_metadata_0_0_0" -CROSS JOIN LATERAL (SELECT (ROW(jsonb_build_array("entity_temporal_metadata_0_0_0"."entity_uuid", "entity_temporal_metadata_0_0_0"."web_id") = jsonb_build_array($2, $3), NULL, NULL, NULL)::continuation) AS "row" +CROSS JOIN LATERAL (SELECT (ROW(((jsonb_build_array("entity_temporal_metadata_0_0_0"."entity_uuid", "entity_temporal_metadata_0_0_0"."web_id") = jsonb_build_array($2, $3))::boolean), NULL, NULL, NULL)::continuation) AS "row" OFFSET 0) AS "continuation_4_0" WHERE 
"entity_temporal_metadata_0_0_0"."transaction_time" && $0 AND "entity_temporal_metadata_0_0_0"."decision_time" && $1 AND ("continuation_4_0"."row")."filter" IS NOT FALSE diff --git a/libs/@local/hashql/hir/docs/dependency-diagram.mmd b/libs/@local/hashql/hir/docs/dependency-diagram.mmd index b207ed26a22..cbc9b89ff6f 100644 --- a/libs/@local/hashql/hir/docs/dependency-diagram.mmd +++ b/libs/@local/hashql/hir/docs/dependency-diagram.mmd @@ -14,63 +14,75 @@ graph TD 3[hash-codegen] 4[hash-graph-api] 5[hash-graph-authorization] - 6[hash-graph-store] - 7[hash-graph-temporal-versioning] - 8[hash-graph-types] - 9[harpc-types] - 10[harpc-wire-protocol] - 11[hashql-ast] - 12[hashql-compiletest] - 13[hashql-core] - 14[hashql-diagnostics] - 15[hashql-eval] - 16[hashql-hir] - class 16 root - 17[hashql-macros] - 18[hashql-mir] - 19[hashql-syntax-jexpr] - 20[hash-temporal-client] - 21[darwin-kperf] - 22[darwin-kperf-criterion] - 23[darwin-kperf-events] - 24[darwin-kperf-sys] - 25[error-stack] - 26[hash-graph-benches] - 27[hash-graph-test-data] + 6[hash-graph-migrations] + 7[hash-graph-migrations-macros] + 8[hash-graph-postgres-store] + 9[hash-graph-store] + 10[hash-graph-temporal-versioning] + 11[hash-graph-types] + 12[hash-graph-validation] + 13[harpc-types] + 14[harpc-wire-protocol] + 15[hashql-ast] + 16[hashql-compiletest] + 17[hashql-core] + 18[hashql-diagnostics] + 19[hashql-eval] + 20[hashql-hir] + class 20 root + 21[hashql-macros] + 22[hashql-mir] + 23[hashql-syntax-jexpr] + 24[hash-status] + 25[hash-telemetry] + 26[hash-temporal-client] + 27[darwin-kperf] + 28[darwin-kperf-criterion] + 29[darwin-kperf-events] + 30[darwin-kperf-sys] + 31[error-stack] + 32[hash-graph-benches] + 33[hash-graph-test-data] 0 --> 4 - 1 --> 7 - 1 -.-> 27 + 1 --> 10 + 1 -.-> 33 2 -.-> 3 - 2 --> 10 - 4 --> 15 + 2 --> 14 4 --> 19 + 4 --> 23 5 --> 1 - 6 --> 5 - 6 --> 8 - 6 --> 20 - 7 --> 2 - 8 -.-> 27 - 10 -.-> 9 - 10 --> 9 - 10 --> 25 - 11 -.-> 12 - 12 --> 15 - 12 --> 18 - 12 --> 19 - 12 --> 25 - 
13 --> 2 - 13 --> 14 - 13 --> 17 - 13 -.-> 22 - 15 --> 6 - 15 --> 16 - 16 -.-> 12 - 18 --> 16 - 19 --> 11 - 19 --> 13 - 20 --> 1 - 21 --> 23 - 21 --> 24 - 22 --> 21 - 26 -.-> 4 - 27 --> 6 + 6 --> 7 + 6 --> 25 + 8 -.-> 6 + 8 --> 12 + 8 --> 24 + 9 --> 5 + 9 --> 11 + 9 --> 26 + 10 --> 2 + 11 -.-> 33 + 12 -.-> 33 + 14 -.-> 13 + 14 --> 13 + 14 --> 31 + 15 -.-> 16 + 16 --> 19 + 16 --> 23 + 16 --> 31 + 17 --> 2 + 17 --> 18 + 17 --> 21 + 17 -.-> 28 + 19 --> 8 + 19 --> 22 + 20 -.-> 16 + 22 --> 20 + 23 --> 15 + 23 --> 17 + 25 --> 31 + 26 --> 1 + 27 --> 29 + 27 --> 30 + 28 --> 27 + 32 -.-> 4 + 33 --> 9 diff --git a/libs/@local/hashql/mir/docs/dependency-diagram.mmd b/libs/@local/hashql/mir/docs/dependency-diagram.mmd index 48c7d663992..af09d1d03f2 100644 --- a/libs/@local/hashql/mir/docs/dependency-diagram.mmd +++ b/libs/@local/hashql/mir/docs/dependency-diagram.mmd @@ -14,63 +14,75 @@ graph TD 3[hash-codegen] 4[hash-graph-api] 5[hash-graph-authorization] - 6[hash-graph-store] - 7[hash-graph-temporal-versioning] - 8[hash-graph-types] - 9[harpc-types] - 10[harpc-wire-protocol] - 11[hashql-ast] - 12[hashql-compiletest] - 13[hashql-core] - 14[hashql-diagnostics] - 15[hashql-eval] - 16[hashql-hir] - 17[hashql-macros] - 18[hashql-mir] - class 18 root - 19[hashql-syntax-jexpr] - 20[hash-temporal-client] - 21[darwin-kperf] - 22[darwin-kperf-criterion] - 23[darwin-kperf-events] - 24[darwin-kperf-sys] - 25[error-stack] - 26[hash-graph-benches] - 27[hash-graph-test-data] + 6[hash-graph-migrations] + 7[hash-graph-migrations-macros] + 8[hash-graph-postgres-store] + 9[hash-graph-store] + 10[hash-graph-temporal-versioning] + 11[hash-graph-types] + 12[hash-graph-validation] + 13[harpc-types] + 14[harpc-wire-protocol] + 15[hashql-ast] + 16[hashql-compiletest] + 17[hashql-core] + 18[hashql-diagnostics] + 19[hashql-eval] + 20[hashql-hir] + 21[hashql-macros] + 22[hashql-mir] + class 22 root + 23[hashql-syntax-jexpr] + 24[hash-status] + 25[hash-telemetry] + 26[hash-temporal-client] + 
27[darwin-kperf] + 28[darwin-kperf-criterion] + 29[darwin-kperf-events] + 30[darwin-kperf-sys] + 31[error-stack] + 32[hash-graph-benches] + 33[hash-graph-test-data] 0 --> 4 - 1 --> 7 - 1 -.-> 27 + 1 --> 10 + 1 -.-> 33 2 -.-> 3 - 2 --> 10 - 4 --> 15 + 2 --> 14 4 --> 19 + 4 --> 23 5 --> 1 - 6 --> 5 - 6 --> 8 - 6 --> 20 - 7 --> 2 - 8 -.-> 27 - 10 -.-> 9 - 10 --> 9 - 10 --> 25 - 11 -.-> 12 - 12 --> 15 - 12 --> 18 - 12 --> 19 - 12 --> 25 - 13 --> 2 - 13 --> 14 - 13 --> 17 - 13 -.-> 22 - 15 --> 6 - 15 --> 16 - 16 -.-> 12 - 18 --> 16 - 19 --> 11 - 19 --> 13 - 20 --> 1 - 21 --> 23 - 21 --> 24 - 22 --> 21 - 26 -.-> 4 - 27 --> 6 + 6 --> 7 + 6 --> 25 + 8 -.-> 6 + 8 --> 12 + 8 --> 24 + 9 --> 5 + 9 --> 11 + 9 --> 26 + 10 --> 2 + 11 -.-> 33 + 12 -.-> 33 + 14 -.-> 13 + 14 --> 13 + 14 --> 31 + 15 -.-> 16 + 16 --> 19 + 16 --> 23 + 16 --> 31 + 17 --> 2 + 17 --> 18 + 17 --> 21 + 17 -.-> 28 + 19 --> 8 + 19 --> 22 + 20 -.-> 16 + 22 --> 20 + 23 --> 15 + 23 --> 17 + 25 --> 31 + 26 --> 1 + 27 --> 29 + 27 --> 30 + 28 --> 27 + 32 -.-> 4 + 33 --> 9 diff --git a/libs/@local/hashql/syntax-jexpr/docs/dependency-diagram.mmd b/libs/@local/hashql/syntax-jexpr/docs/dependency-diagram.mmd index beb0abc8ae3..e63e877974f 100644 --- a/libs/@local/hashql/syntax-jexpr/docs/dependency-diagram.mmd +++ b/libs/@local/hashql/syntax-jexpr/docs/dependency-diagram.mmd @@ -14,63 +14,75 @@ graph TD 3[hash-codegen] 4[hash-graph-api] 5[hash-graph-authorization] - 6[hash-graph-store] - 7[hash-graph-temporal-versioning] - 8[hash-graph-types] - 9[harpc-types] - 10[harpc-wire-protocol] - 11[hashql-ast] - 12[hashql-compiletest] - 13[hashql-core] - 14[hashql-diagnostics] - 15[hashql-eval] - 16[hashql-hir] - 17[hashql-macros] - 18[hashql-mir] - 19[hashql-syntax-jexpr] - class 19 root - 20[hash-temporal-client] - 21[darwin-kperf] - 22[darwin-kperf-criterion] - 23[darwin-kperf-events] - 24[darwin-kperf-sys] - 25[error-stack] - 26[hash-graph-benches] - 27[hash-graph-test-data] + 6[hash-graph-migrations] + 
7[hash-graph-migrations-macros] + 8[hash-graph-postgres-store] + 9[hash-graph-store] + 10[hash-graph-temporal-versioning] + 11[hash-graph-types] + 12[hash-graph-validation] + 13[harpc-types] + 14[harpc-wire-protocol] + 15[hashql-ast] + 16[hashql-compiletest] + 17[hashql-core] + 18[hashql-diagnostics] + 19[hashql-eval] + 20[hashql-hir] + 21[hashql-macros] + 22[hashql-mir] + 23[hashql-syntax-jexpr] + class 23 root + 24[hash-status] + 25[hash-telemetry] + 26[hash-temporal-client] + 27[darwin-kperf] + 28[darwin-kperf-criterion] + 29[darwin-kperf-events] + 30[darwin-kperf-sys] + 31[error-stack] + 32[hash-graph-benches] + 33[hash-graph-test-data] 0 --> 4 - 1 --> 7 - 1 -.-> 27 + 1 --> 10 + 1 -.-> 33 2 -.-> 3 - 2 --> 10 - 4 --> 15 + 2 --> 14 4 --> 19 + 4 --> 23 5 --> 1 - 6 --> 5 - 6 --> 8 - 6 --> 20 - 7 --> 2 - 8 -.-> 27 - 10 -.-> 9 - 10 --> 9 - 10 --> 25 - 11 -.-> 12 - 12 --> 15 - 12 --> 18 - 12 --> 19 - 12 --> 25 - 13 --> 2 - 13 --> 14 - 13 --> 17 - 13 -.-> 22 - 15 --> 6 - 15 --> 16 - 16 -.-> 12 - 18 --> 16 - 19 --> 11 - 19 --> 13 - 20 --> 1 - 21 --> 23 - 21 --> 24 - 22 --> 21 - 26 -.-> 4 - 27 --> 6 + 6 --> 7 + 6 --> 25 + 8 -.-> 6 + 8 --> 12 + 8 --> 24 + 9 --> 5 + 9 --> 11 + 9 --> 26 + 10 --> 2 + 11 -.-> 33 + 12 -.-> 33 + 14 -.-> 13 + 14 --> 13 + 14 --> 31 + 15 -.-> 16 + 16 --> 19 + 16 --> 23 + 16 --> 31 + 17 --> 2 + 17 --> 18 + 17 --> 21 + 17 -.-> 28 + 19 --> 8 + 19 --> 22 + 20 -.-> 16 + 22 --> 20 + 23 --> 15 + 23 --> 17 + 25 --> 31 + 26 --> 1 + 27 --> 29 + 27 --> 30 + 28 --> 27 + 32 -.-> 4 + 33 --> 9 diff --git a/libs/@local/status/rust/docs/dependency-diagram.mmd b/libs/@local/status/rust/docs/dependency-diagram.mmd index d0b457c7914..a9df8779a27 100644 --- a/libs/@local/status/rust/docs/dependency-diagram.mmd +++ b/libs/@local/status/rust/docs/dependency-diagram.mmd @@ -12,14 +12,29 @@ graph TD 1[hash-graph-api] 2[hash-graph-postgres-store] 3[hash-graph-type-defs] - 4[hash-status] - class 4 root - 5[hash-graph-benches] - 6[hash-graph-integration] + 4[hashql-ast] + 
5[hashql-compiletest] + 6[hashql-eval] + 7[hashql-hir] + 8[hashql-mir] + 9[hashql-syntax-jexpr] + 10[hash-status] + class 10 root + 11[hash-graph-benches] + 12[hash-graph-integration] 0 --> 1 - 1 --> 2 1 --> 3 - 2 --> 4 - 3 --> 4 - 5 -.-> 1 - 6 -.-> 2 + 1 --> 6 + 1 --> 9 + 2 --> 10 + 3 --> 10 + 4 -.-> 5 + 5 --> 6 + 5 --> 9 + 6 --> 2 + 6 --> 8 + 7 -.-> 5 + 8 --> 7 + 9 --> 4 + 11 -.-> 1 + 12 -.-> 2 diff --git a/libs/@local/telemetry/docs/dependency-diagram.mmd b/libs/@local/telemetry/docs/dependency-diagram.mmd index 8247ab706b0..b781434d24e 100644 --- a/libs/@local/telemetry/docs/dependency-diagram.mmd +++ b/libs/@local/telemetry/docs/dependency-diagram.mmd @@ -12,18 +12,34 @@ graph TD 1[hash-graph-api] 2[hash-graph-migrations] 3[hash-graph-postgres-store] - 4[hash-repo-chores] - 5[hash-telemetry] - class 5 root - 6[error-stack] - 7[hash-graph-benches] - 8[hash-graph-integration] + 4[hashql-ast] + 5[hashql-compiletest] + 6[hashql-eval] + 7[hashql-hir] + 8[hashql-mir] + 9[hashql-syntax-jexpr] + 10[hash-repo-chores] + 11[hash-telemetry] + class 11 root + 12[error-stack] + 13[hash-graph-benches] + 14[hash-graph-integration] 0 --> 1 - 1 --> 3 - 2 --> 5 + 1 --> 6 + 1 --> 9 + 2 --> 11 3 -.-> 2 - 4 --> 5 + 4 -.-> 5 5 --> 6 - 7 -.-> 1 - 7 -.-> 4 - 8 -.-> 3 + 5 --> 9 + 5 --> 12 + 6 --> 3 + 6 --> 8 + 7 -.-> 5 + 8 --> 7 + 9 --> 4 + 10 --> 11 + 11 --> 12 + 13 -.-> 1 + 13 -.-> 10 + 14 -.-> 3 diff --git a/libs/@local/temporal-client/docs/dependency-diagram.mmd b/libs/@local/temporal-client/docs/dependency-diagram.mmd index bf78a0c203f..8ff26bd1573 100644 --- a/libs/@local/temporal-client/docs/dependency-diagram.mmd +++ b/libs/@local/temporal-client/docs/dependency-diagram.mmd @@ -40,7 +40,6 @@ graph TD 1 -.-> 25 2 -.-> 3 2 --> 14 - 4 --> 6 4 --> 9 4 --> 12 4 --> 17 @@ -61,11 +60,10 @@ graph TD 14 --> 22 15 -.-> 16 16 --> 17 - 16 --> 19 16 --> 20 16 --> 22 - 17 --> 7 - 17 --> 18 + 17 --> 6 + 17 --> 19 18 -.-> 16 19 --> 18 20 --> 15 diff --git 
a/tests/graph/test-data/rust/docs/dependency-diagram.mmd b/tests/graph/test-data/rust/docs/dependency-diagram.mmd index 795d4922370..d2f3fa16477 100644 --- a/tests/graph/test-data/rust/docs/dependency-diagram.mmd +++ b/tests/graph/test-data/rust/docs/dependency-diagram.mmd @@ -40,7 +40,6 @@ graph TD 1 -.-> 25 2 -.-> 3 2 --> 14 - 4 --> 6 4 --> 9 4 --> 12 4 --> 17 @@ -61,11 +60,10 @@ graph TD 14 --> 22 15 -.-> 16 16 --> 17 - 16 --> 19 16 --> 20 16 --> 22 - 17 --> 7 - 17 --> 18 + 17 --> 6 + 17 --> 19 18 -.-> 16 19 --> 18 20 --> 15 From c5c3b77fa1f91c21a068e2aa92c9dc4a9d6b8bdc Mon Sep 17 00:00:00 2001 From: Bilal Mahmoud Date: Sun, 8 Mar 2026 17:57:56 +0100 Subject: [PATCH 5/7] chore: lockfile --- yarn.lock | 2 ++ 1 file changed, 2 insertions(+) diff --git a/yarn.lock b/yarn.lock index 8b34373fb4a..f87ff9f0602 100644 --- a/yarn.lock +++ b/yarn.lock @@ -15157,10 +15157,12 @@ __metadata: resolution: "@rust/hashql-eval@workspace:libs/@local/hashql/eval" dependencies: "@blockprotocol/type-system-rs": "workspace:*" + "@rust/hash-graph-postgres-store": "workspace:*" "@rust/hash-graph-store": "workspace:*" "@rust/hashql-core": "workspace:*" "@rust/hashql-diagnostics": "workspace:*" "@rust/hashql-hir": "workspace:*" + "@rust/hashql-mir": "workspace:*" languageName: unknown linkType: soft From 773666762f616d982a64148bd958088043fd8299 Mon Sep 17 00:00:00 2001 From: Bilal Mahmoud Date: Sun, 8 Mar 2026 18:16:51 +0100 Subject: [PATCH 6/7] fix: suggestions from code review --- libs/@local/hashql/eval/src/postgres/parameters.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/libs/@local/hashql/eval/src/postgres/parameters.rs b/libs/@local/hashql/eval/src/postgres/parameters.rs index 11d4fbcd981..e4aa9409d75 100644 --- a/libs/@local/hashql/eval/src/postgres/parameters.rs +++ b/libs/@local/hashql/eval/src/postgres/parameters.rs @@ -24,7 +24,7 @@ id::newtype!( impl From for Expression { fn from(value: ParameterIndex) -> Self { - Self::Parameter(value.as_usize()) + 
Self::Parameter(value.as_usize() + 1) } } From 5037fe846a738a32fca9ae098428de83365659c0 Mon Sep 17 00:00:00 2001 From: Bilal Mahmoud Date: Sun, 8 Mar 2026 19:27:35 +0100 Subject: [PATCH 7/7] fix: suggestion from code review --- .../hashql/eval/src/postgres/filter/mod.rs | 2 +- libs/@local/hashql/eval/src/postgres/mod.rs | 17 ++++----- .../hashql/eval/src/postgres/parameters.rs | 35 ++++++++++++------- .../ui/postgres/comparison-no-cast.stdout | 12 +++---- .../ui/postgres/constant-true-filter.stdout | 6 ++-- .../ui/postgres/dict-construction.stdout | 12 +++---- .../ui/postgres/entity-archived-check.stdout | 6 ++-- .../postgres/entity-draft-id-equality.stdout | 6 ++-- .../postgres/entity-type-ids-lateral.stdout | 16 ++++----- .../ui/postgres/entity-uuid-equality.stdout | 10 +++--- .../ui/postgres/entity-web-id-equality.stdout | 10 +++--- .../ui/postgres/env-captured-variable.stdout | 10 +++--- .../filter/binary_bitand_bigint_cast.snap | 2 +- .../filter/binary_sub_numeric_cast.snap | 2 +- .../data_island_provides_without_lateral.snap | 26 +++++++------- .../ui/postgres/filter/diamond_cfg_merge.snap | 2 +- .../filter/dynamic_index_projection.snap | 2 +- .../filter/field_by_name_projection.snap | 2 +- .../ui/postgres/filter/island_exit_goto.snap | 2 +- .../filter/island_exit_switch_int.snap | 2 +- .../filter/island_exit_with_live_out.snap | 2 +- .../postgres/filter/left_entity_filter.snap | 2 +- .../filter/nested_property_access.snap | 2 +- .../filter/property_field_equality.snap | 2 +- .../ui/postgres/filter/property_mask.snap | 26 +++++++------- .../provides_drives_select_and_joins.snap | 26 +++++++------- .../filter/straight_line_goto_chain.snap | 2 +- .../filter/switch_int_many_branches.snap | 2 +- .../ui/postgres/filter/unary_bitnot.snap | 2 +- .../tests/ui/postgres/filter/unary_neg.snap | 2 +- .../tests/ui/postgres/filter/unary_not.snap | 2 +- .../ui/postgres/if-input-branches.stdout | 14 ++++---- .../ui/postgres/input-parameter-exists.stdout | 10 +++--- 
.../ui/postgres/input-parameter-load.stdout | 10 +++--- .../postgres/let-binding-propagation.stdout | 10 +++--- .../ui/postgres/list-construction.stdout | 12 +++---- .../ui/postgres/logical-and-inputs.stdout | 12 +++---- .../minimal-select-no-extra-joins.stdout | 10 +++--- .../ui/postgres/mixed-sources-filter.stdout | 10 +++--- .../tests/ui/postgres/multiple-filters.stdout | 14 ++++---- .../postgres/nested-if-input-branches.stdout | 18 +++++----- .../ui/postgres/opaque-passthrough.stdout | 10 +++--- .../ui/postgres/struct-construction.stdout | 16 ++++----- .../ui/postgres/tuple-construction.stdout | 12 +++---- 44 files changed, 208 insertions(+), 202 deletions(-) diff --git a/libs/@local/hashql/eval/src/postgres/filter/mod.rs b/libs/@local/hashql/eval/src/postgres/filter/mod.rs index 285dc0dafd4..fb854e37498 100644 --- a/libs/@local/hashql/eval/src/postgres/filter/mod.rs +++ b/libs/@local/hashql/eval/src/postgres/filter/mod.rs @@ -295,7 +295,7 @@ impl<'ctx, 'heap, A: Allocator, S: Allocator> GraphReadFilterCompiler<'ctx, 'hea }, rest @ .., ] => { - let param = db.parameters.env(*field); + let param = db.parameters.env(self.body.id, *field); (param.into(), rest) } [..] 
=> { diff --git a/libs/@local/hashql/eval/src/postgres/mod.rs b/libs/@local/hashql/eval/src/postgres/mod.rs index 8fcc1c1d05f..24c8d72bcf6 100644 --- a/libs/@local/hashql/eval/src/postgres/mod.rs +++ b/libs/@local/hashql/eval/src/postgres/mod.rs @@ -36,7 +36,7 @@ use hash_graph_postgres_store::store::postgres::query::{ self, Column, Expression, Identifier, SelectExpression, SelectStatement, Transpile as _, WhereExpression, table::EntityTemporalMetadata, }; -use hashql_core::{heap::BumpAllocator, id::Id as _}; +use hashql_core::heap::BumpAllocator; use hashql_mir::{ body::{ Body, @@ -110,16 +110,11 @@ impl DatabaseContext<'_, A> { fn add_temporal_conditions(&mut self) { let temporal_metadata = self.projections.temporal_metadata(); - let tx_param = Expression::Parameter( - self.parameters - .temporal_axis(TemporalAxis::Transaction) - .as_usize(), - ); - let dt_param = Expression::Parameter( - self.parameters - .temporal_axis(TemporalAxis::Decision) - .as_usize(), - ); + let tx_param = self + .parameters + .temporal_axis(TemporalAxis::Transaction) + .into(); + let dt_param = self.parameters.temporal_axis(TemporalAxis::Decision).into(); self.where_expression.add_condition(Expression::overlap( Expression::ColumnReference(query::ColumnReference { diff --git a/libs/@local/hashql/eval/src/postgres/parameters.rs b/libs/@local/hashql/eval/src/postgres/parameters.rs index e4aa9409d75..a85a7127db8 100644 --- a/libs/@local/hashql/eval/src/postgres/parameters.rs +++ b/libs/@local/hashql/eval/src/postgres/parameters.rs @@ -6,7 +6,10 @@ //! the reverse mapping to bind runtime values in the correct `$N` order. 
use alloc::alloc::Global; -use core::{alloc::Allocator, fmt}; +use core::{ + alloc::Allocator, + fmt::{self, Display}, +}; use hash_graph_postgres_store::store::postgres::query::Expression; use hashql_core::{ @@ -15,13 +18,20 @@ use hashql_core::{ symbol::Symbol, value::Primitive, }; -use hashql_mir::{body::place::FieldIndex, interpret::value::Int}; +use hashql_mir::{body::place::FieldIndex, def::DefId, interpret::value::Int}; id::newtype!( /// Index of a SQL parameter in the compiled query, rendered as `$N` by the SQL formatter. + #[id(display = !)] pub struct ParameterIndex(u32 is 0..=u32::MAX) ); +impl Display for ParameterIndex { + fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(fmt, "${}", self.as_u32() + 1) + } +} + impl From for Expression { fn from(value: ParameterIndex) -> Self { Self::Parameter(value.as_usize() + 1) @@ -43,7 +53,7 @@ enum Parameter<'heap> { /// A symbol used as a JSON object key in SQL expressions. Symbol(Symbol<'heap>), /// A captured-environment field access. - Env(FieldIndex), + Env(DefId, FieldIndex), /// Temporal axis range provided by the interpreter at execution time. 
/// /// The interpreter binds these based on the user's temporal axes configuration: @@ -59,7 +69,7 @@ impl fmt::Display for Parameter<'_> { Self::Int(int) => write!(fmt, "Int({int})"), Self::Primitive(primitive) => write!(fmt, "Primitive({primitive})"), Self::Symbol(symbol) => write!(fmt, "Symbol({symbol})"), - Self::Env(field) => write!(fmt, "Env(#{})", field.as_u32()), + Self::Env(def, field) => write!(fmt, "Env({def}, #{})", field.as_u32()), Self::TemporalAxis(axis) => write!(fmt, "TemporalAxis({axis})"), } } @@ -129,8 +139,8 @@ impl<'heap, A: Allocator> Parameters<'heap, A> { self.get_or_insert(Parameter::Primitive(primitive)) } - pub(crate) fn env(&mut self, field: FieldIndex) -> ParameterIndex { - self.get_or_insert(Parameter::Env(field)) + pub(crate) fn env(&mut self, body: DefId, field: FieldIndex) -> ParameterIndex { + self.get_or_insert(Parameter::Env(body, field)) } pub(crate) fn temporal_axis(&mut self, axis: TemporalAxis) -> ParameterIndex { @@ -150,11 +160,11 @@ impl<'heap, A: Allocator> Parameters<'heap, A> { impl fmt::Display for Parameters<'_, A> { fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result { - for (index, param) in self.reverse.iter().enumerate() { - if index > 0 { + for (index, param) in self.reverse.iter_enumerated() { + if index.as_usize() > 0 { fmt.write_str("\n")?; } - write!(fmt, "${index}: {param}")?; + write!(fmt, "{index}: {param}")?; } Ok(()) @@ -168,9 +178,10 @@ mod tests { use hashql_core::{ heap::Heap, + id::Id as _, value::{Primitive, String}, }; - use hashql_mir::{body::place::FieldIndex, interpret::value::Int}; + use hashql_mir::{body::place::FieldIndex, def::DefId, interpret::value::Int}; use super::{Parameters, TemporalAxis}; @@ -236,8 +247,8 @@ mod tests { #[test] fn env_dedup() { let mut params = Parameters::new_in(Global); - let a = params.env(FieldIndex::new(0)); - let b = params.env(FieldIndex::new(0)); + let a = params.env(DefId::MIN, FieldIndex::new(0)); + let b = params.env(DefId::MIN, 
FieldIndex::new(0)); assert_eq!(a, b); assert_eq!(params.len(), 1); diff --git a/libs/@local/hashql/eval/tests/ui/postgres/comparison-no-cast.stdout b/libs/@local/hashql/eval/tests/ui/postgres/comparison-no-cast.stdout index 3a0a5727ac9..4670536a811 100644 --- a/libs/@local/hashql/eval/tests/ui/postgres/comparison-no-cast.stdout +++ b/libs/@local/hashql/eval/tests/ui/postgres/comparison-no-cast.stdout @@ -2,13 +2,13 @@ SELECT ("continuation_4_0"."row")."block" AS "continuation_4_0_block", ("continuation_4_0"."row")."locals" AS "continuation_4_0_locals", ("continuation_4_0"."row")."values" AS "continuation_4_0_values" FROM "entity_temporal_metadata" AS "entity_temporal_metadata_0_0_0" -CROSS JOIN LATERAL (SELECT (ROW((($2 > $3)::boolean), NULL, NULL, NULL)::continuation) AS "row" +CROSS JOIN LATERAL (SELECT (ROW((($3 > $4)::boolean), NULL, NULL, NULL)::continuation) AS "row" OFFSET 0) AS "continuation_4_0" -WHERE "entity_temporal_metadata_0_0_0"."transaction_time" && $0 AND "entity_temporal_metadata_0_0_0"."decision_time" && $1 AND ("continuation_4_0"."row")."filter" IS NOT FALSE +WHERE "entity_temporal_metadata_0_0_0"."transaction_time" && $1 AND "entity_temporal_metadata_0_0_0"."decision_time" && $2 AND ("continuation_4_0"."row")."filter" IS NOT FALSE ════ Parameters ════════════════════════════════════════════════════════════════ -$0: TemporalAxis(Transaction) -$1: TemporalAxis(Decision) -$2: Input(x) -$3: Input(y) +$1: TemporalAxis(Transaction) +$2: TemporalAxis(Decision) +$3: Input(x) +$4: Input(y) diff --git a/libs/@local/hashql/eval/tests/ui/postgres/constant-true-filter.stdout b/libs/@local/hashql/eval/tests/ui/postgres/constant-true-filter.stdout index c8ba866a07c..5f9c348d0e4 100644 --- a/libs/@local/hashql/eval/tests/ui/postgres/constant-true-filter.stdout +++ b/libs/@local/hashql/eval/tests/ui/postgres/constant-true-filter.stdout @@ -2,9 +2,9 @@ SELECT 1 AS "placeholder" FROM "entity_temporal_metadata" AS "entity_temporal_metadata_0_0_0" -WHERE 
"entity_temporal_metadata_0_0_0"."transaction_time" && $0 AND "entity_temporal_metadata_0_0_0"."decision_time" && $1 +WHERE "entity_temporal_metadata_0_0_0"."transaction_time" && $1 AND "entity_temporal_metadata_0_0_0"."decision_time" && $2 ════ Parameters ════════════════════════════════════════════════════════════════ -$0: TemporalAxis(Transaction) -$1: TemporalAxis(Decision) +$1: TemporalAxis(Transaction) +$2: TemporalAxis(Decision) diff --git a/libs/@local/hashql/eval/tests/ui/postgres/dict-construction.stdout b/libs/@local/hashql/eval/tests/ui/postgres/dict-construction.stdout index fbb362dcc7a..7a347f26c72 100644 --- a/libs/@local/hashql/eval/tests/ui/postgres/dict-construction.stdout +++ b/libs/@local/hashql/eval/tests/ui/postgres/dict-construction.stdout @@ -2,13 +2,13 @@ SELECT ("continuation_4_0"."row")."block" AS "continuation_4_0_block", ("continuation_4_0"."row")."locals" AS "continuation_4_0_locals", ("continuation_4_0"."row")."values" AS "continuation_4_0_values" FROM "entity_temporal_metadata" AS "entity_temporal_metadata_0_0_0" -CROSS JOIN LATERAL (SELECT (ROW(((jsonb_build_object("entity_temporal_metadata_0_0_0"."entity_uuid", "entity_temporal_metadata_0_0_0"."web_id") = jsonb_build_object($2, $3))::boolean), NULL, NULL, NULL)::continuation) AS "row" +CROSS JOIN LATERAL (SELECT (ROW(((jsonb_build_object("entity_temporal_metadata_0_0_0"."entity_uuid", "entity_temporal_metadata_0_0_0"."web_id") = jsonb_build_object($3, $4))::boolean), NULL, NULL, NULL)::continuation) AS "row" OFFSET 0) AS "continuation_4_0" -WHERE "entity_temporal_metadata_0_0_0"."transaction_time" && $0 AND "entity_temporal_metadata_0_0_0"."decision_time" && $1 AND ("continuation_4_0"."row")."filter" IS NOT FALSE +WHERE "entity_temporal_metadata_0_0_0"."transaction_time" && $1 AND "entity_temporal_metadata_0_0_0"."decision_time" && $2 AND ("continuation_4_0"."row")."filter" IS NOT FALSE ════ Parameters ════════════════════════════════════════════════════════════════ -$0: 
TemporalAxis(Transaction) -$1: TemporalAxis(Decision) -$2: Input(k) -$3: Input(v) +$1: TemporalAxis(Transaction) +$2: TemporalAxis(Decision) +$3: Input(k) +$4: Input(v) diff --git a/libs/@local/hashql/eval/tests/ui/postgres/entity-archived-check.stdout b/libs/@local/hashql/eval/tests/ui/postgres/entity-archived-check.stdout index e2e71700f42..355aa0016a8 100644 --- a/libs/@local/hashql/eval/tests/ui/postgres/entity-archived-check.stdout +++ b/libs/@local/hashql/eval/tests/ui/postgres/entity-archived-check.stdout @@ -6,9 +6,9 @@ INNER JOIN "entity_editions" AS "entity_editions_0_0_1" ON "entity_editions_0_0_1"."entity_edition_id" = "entity_temporal_metadata_0_0_0"."entity_edition_id" CROSS JOIN LATERAL (SELECT (ROW(((NOT("entity_editions_0_0_1"."archived"))::boolean), NULL, NULL, NULL)::continuation) AS "row" OFFSET 0) AS "continuation_1_0" -WHERE "entity_temporal_metadata_0_0_0"."transaction_time" && $0 AND "entity_temporal_metadata_0_0_0"."decision_time" && $1 AND ("continuation_1_0"."row")."filter" IS NOT FALSE +WHERE "entity_temporal_metadata_0_0_0"."transaction_time" && $1 AND "entity_temporal_metadata_0_0_0"."decision_time" && $2 AND ("continuation_1_0"."row")."filter" IS NOT FALSE ════ Parameters ════════════════════════════════════════════════════════════════ -$0: TemporalAxis(Transaction) -$1: TemporalAxis(Decision) +$1: TemporalAxis(Transaction) +$2: TemporalAxis(Decision) diff --git a/libs/@local/hashql/eval/tests/ui/postgres/entity-draft-id-equality.stdout b/libs/@local/hashql/eval/tests/ui/postgres/entity-draft-id-equality.stdout index 06893ece76f..0632230eca6 100644 --- a/libs/@local/hashql/eval/tests/ui/postgres/entity-draft-id-equality.stdout +++ b/libs/@local/hashql/eval/tests/ui/postgres/entity-draft-id-equality.stdout @@ -2,9 +2,9 @@ SELECT "entity_temporal_metadata_0_0_0"."draft_id" AS "draft_id" FROM "entity_temporal_metadata" AS "entity_temporal_metadata_0_0_0" -WHERE "entity_temporal_metadata_0_0_0"."transaction_time" && $0 AND 
"entity_temporal_metadata_0_0_0"."decision_time" && $1 +WHERE "entity_temporal_metadata_0_0_0"."transaction_time" && $1 AND "entity_temporal_metadata_0_0_0"."decision_time" && $2 ════ Parameters ════════════════════════════════════════════════════════════════ -$0: TemporalAxis(Transaction) -$1: TemporalAxis(Decision) +$1: TemporalAxis(Transaction) +$2: TemporalAxis(Decision) diff --git a/libs/@local/hashql/eval/tests/ui/postgres/entity-type-ids-lateral.stdout b/libs/@local/hashql/eval/tests/ui/postgres/entity-type-ids-lateral.stdout index b417cb5ee5a..5d666552cf4 100644 --- a/libs/@local/hashql/eval/tests/ui/postgres/entity-type-ids-lateral.stdout +++ b/libs/@local/hashql/eval/tests/ui/postgres/entity-type-ids-lateral.stdout @@ -2,19 +2,19 @@ SELECT ("continuation_2_0"."row")."block" AS "continuation_2_0_block", ("continuation_2_0"."row")."locals" AS "continuation_2_0_locals", ("continuation_2_0"."row")."values" AS "continuation_2_0_values" FROM "entity_temporal_metadata" AS "entity_temporal_metadata_0_0_0" -LEFT OUTER JOIN LATERAL (SELECT jsonb_agg(jsonb_build_object($3, "b", $4, "v")) AS "entity_type_ids" +LEFT OUTER JOIN LATERAL (SELECT jsonb_agg(jsonb_build_object($4, "b", $5, "v")) AS "entity_type_ids" FROM "entity_is_of_type_ids" AS "eit" CROSS JOIN UNNEST("eit"."base_urls", "eit"."versions") AS "u"("b", "v") WHERE "eit"."entity_edition_id" = "entity_temporal_metadata_0_0_0"."entity_edition_id") AS "entity_is_of_type_ids_0_0_1" ON TRUE -CROSS JOIN LATERAL (SELECT (ROW((("entity_is_of_type_ids_0_0_1"."entity_type_ids" = $2)::boolean), NULL, NULL, NULL)::continuation) AS "row" +CROSS JOIN LATERAL (SELECT (ROW((("entity_is_of_type_ids_0_0_1"."entity_type_ids" = $3)::boolean), NULL, NULL, NULL)::continuation) AS "row" OFFSET 0) AS "continuation_2_0" -WHERE "entity_temporal_metadata_0_0_0"."transaction_time" && $0 AND "entity_temporal_metadata_0_0_0"."decision_time" && $1 AND ("continuation_2_0"."row")."filter" IS NOT FALSE +WHERE 
"entity_temporal_metadata_0_0_0"."transaction_time" && $1 AND "entity_temporal_metadata_0_0_0"."decision_time" && $2 AND ("continuation_2_0"."row")."filter" IS NOT FALSE ════ Parameters ════════════════════════════════════════════════════════════════ -$0: TemporalAxis(Transaction) -$1: TemporalAxis(Decision) -$2: Input(expected_types) -$3: Symbol(base_url) -$4: Symbol(version) +$1: TemporalAxis(Transaction) +$2: TemporalAxis(Decision) +$3: Input(expected_types) +$4: Symbol(base_url) +$5: Symbol(version) diff --git a/libs/@local/hashql/eval/tests/ui/postgres/entity-uuid-equality.stdout b/libs/@local/hashql/eval/tests/ui/postgres/entity-uuid-equality.stdout index f6063f92ac0..751694e918e 100644 --- a/libs/@local/hashql/eval/tests/ui/postgres/entity-uuid-equality.stdout +++ b/libs/@local/hashql/eval/tests/ui/postgres/entity-uuid-equality.stdout @@ -2,12 +2,12 @@ SELECT ("continuation_7_0"."row")."block" AS "continuation_7_0_block", ("continuation_7_0"."row")."locals" AS "continuation_7_0_locals", ("continuation_7_0"."row")."values" AS "continuation_7_0_values" FROM "entity_temporal_metadata" AS "entity_temporal_metadata_0_0_0" -CROSS JOIN LATERAL (SELECT (ROW((("entity_temporal_metadata_0_0_0"."entity_uuid" = $2)::boolean), NULL, NULL, NULL)::continuation) AS "row" +CROSS JOIN LATERAL (SELECT (ROW((("entity_temporal_metadata_0_0_0"."entity_uuid" = $3)::boolean), NULL, NULL, NULL)::continuation) AS "row" OFFSET 0) AS "continuation_7_0" -WHERE "entity_temporal_metadata_0_0_0"."transaction_time" && $0 AND "entity_temporal_metadata_0_0_0"."decision_time" && $1 AND ("continuation_7_0"."row")."filter" IS NOT FALSE +WHERE "entity_temporal_metadata_0_0_0"."transaction_time" && $1 AND "entity_temporal_metadata_0_0_0"."decision_time" && $2 AND ("continuation_7_0"."row")."filter" IS NOT FALSE ════ Parameters ════════════════════════════════════════════════════════════════ -$0: TemporalAxis(Transaction) -$1: TemporalAxis(Decision) -$2: 
Primitive("e2851dbb-7376-4959-9bca-f72cafc4448f") +$1: TemporalAxis(Transaction) +$2: TemporalAxis(Decision) +$3: Primitive("e2851dbb-7376-4959-9bca-f72cafc4448f") diff --git a/libs/@local/hashql/eval/tests/ui/postgres/entity-web-id-equality.stdout b/libs/@local/hashql/eval/tests/ui/postgres/entity-web-id-equality.stdout index 751ecba136f..9fca9c14ac6 100644 --- a/libs/@local/hashql/eval/tests/ui/postgres/entity-web-id-equality.stdout +++ b/libs/@local/hashql/eval/tests/ui/postgres/entity-web-id-equality.stdout @@ -2,12 +2,12 @@ SELECT ("continuation_2_0"."row")."block" AS "continuation_2_0_block", ("continuation_2_0"."row")."locals" AS "continuation_2_0_locals", ("continuation_2_0"."row")."values" AS "continuation_2_0_values" FROM "entity_temporal_metadata" AS "entity_temporal_metadata_0_0_0" -CROSS JOIN LATERAL (SELECT (ROW((("entity_temporal_metadata_0_0_0"."web_id" = $2)::boolean), NULL, NULL, NULL)::continuation) AS "row" +CROSS JOIN LATERAL (SELECT (ROW((("entity_temporal_metadata_0_0_0"."web_id" = $3)::boolean), NULL, NULL, NULL)::continuation) AS "row" OFFSET 0) AS "continuation_2_0" -WHERE "entity_temporal_metadata_0_0_0"."transaction_time" && $0 AND "entity_temporal_metadata_0_0_0"."decision_time" && $1 AND ("continuation_2_0"."row")."filter" IS NOT FALSE +WHERE "entity_temporal_metadata_0_0_0"."transaction_time" && $1 AND "entity_temporal_metadata_0_0_0"."decision_time" && $2 AND ("continuation_2_0"."row")."filter" IS NOT FALSE ════ Parameters ════════════════════════════════════════════════════════════════ -$0: TemporalAxis(Transaction) -$1: TemporalAxis(Decision) -$2: Input(web) +$1: TemporalAxis(Transaction) +$2: TemporalAxis(Decision) +$3: Input(web) diff --git a/libs/@local/hashql/eval/tests/ui/postgres/env-captured-variable.stdout b/libs/@local/hashql/eval/tests/ui/postgres/env-captured-variable.stdout index 7b6d8fae6d6..bb92d00015d 100644 --- a/libs/@local/hashql/eval/tests/ui/postgres/env-captured-variable.stdout +++ 
b/libs/@local/hashql/eval/tests/ui/postgres/env-captured-variable.stdout @@ -2,12 +2,12 @@ SELECT ("continuation_0_0"."row")."block" AS "continuation_0_0_block", ("continuation_0_0"."row")."locals" AS "continuation_0_0_locals", ("continuation_0_0"."row")."values" AS "continuation_0_0_values" FROM "entity_temporal_metadata" AS "entity_temporal_metadata_0_0_0" -CROSS JOIN LATERAL (SELECT (ROW((("entity_temporal_metadata_0_0_0"."entity_uuid" = $2)::boolean), NULL, NULL, NULL)::continuation) AS "row" +CROSS JOIN LATERAL (SELECT (ROW((("entity_temporal_metadata_0_0_0"."entity_uuid" = $3)::boolean), NULL, NULL, NULL)::continuation) AS "row" OFFSET 0) AS "continuation_0_0" -WHERE "entity_temporal_metadata_0_0_0"."transaction_time" && $0 AND "entity_temporal_metadata_0_0_0"."decision_time" && $1 AND ("continuation_0_0"."row")."filter" IS NOT FALSE +WHERE "entity_temporal_metadata_0_0_0"."transaction_time" && $1 AND "entity_temporal_metadata_0_0_0"."decision_time" && $2 AND ("continuation_0_0"."row")."filter" IS NOT FALSE ════ Parameters ════════════════════════════════════════════════════════════════ -$0: TemporalAxis(Transaction) -$1: TemporalAxis(Decision) -$2: Env(#0) +$1: TemporalAxis(Transaction) +$2: TemporalAxis(Decision) +$3: Env(0, #0) diff --git a/libs/@local/hashql/eval/tests/ui/postgres/filter/binary_bitand_bigint_cast.snap b/libs/@local/hashql/eval/tests/ui/postgres/filter/binary_bitand_bigint_cast.snap index 782f00a9812..a9989a854ec 100644 --- a/libs/@local/hashql/eval/tests/ui/postgres/filter/binary_bitand_bigint_cast.snap +++ b/libs/@local/hashql/eval/tests/ui/postgres/filter/binary_bitand_bigint_cast.snap @@ -4,4 +4,4 @@ expression: report.to_string() --- ==================== Island (entry: bb0, target: postgres) ===================== -(ROW((((($0)::bigint) & (($1)::bigint))::boolean), NULL, NULL, NULL)::continuation) +(ROW((((($1)::bigint) & (($2)::bigint))::boolean), NULL, NULL, NULL)::continuation) diff --git 
a/libs/@local/hashql/eval/tests/ui/postgres/filter/binary_sub_numeric_cast.snap b/libs/@local/hashql/eval/tests/ui/postgres/filter/binary_sub_numeric_cast.snap index a100edd1cf0..76fc4713692 100644 --- a/libs/@local/hashql/eval/tests/ui/postgres/filter/binary_sub_numeric_cast.snap +++ b/libs/@local/hashql/eval/tests/ui/postgres/filter/binary_sub_numeric_cast.snap @@ -4,4 +4,4 @@ expression: report.to_string() --- ==================== Island (entry: bb0, target: postgres) ===================== -(ROW((((($0)::numeric) - (($1)::numeric))::boolean), NULL, NULL, NULL)::continuation) +(ROW((((($1)::numeric) - (($2)::numeric))::boolean), NULL, NULL, NULL)::continuation) diff --git a/libs/@local/hashql/eval/tests/ui/postgres/filter/data_island_provides_without_lateral.snap b/libs/@local/hashql/eval/tests/ui/postgres/filter/data_island_provides_without_lateral.snap index 975503ea588..be49473f65f 100644 --- a/libs/@local/hashql/eval/tests/ui/postgres/filter/data_island_provides_without_lateral.snap +++ b/libs/@local/hashql/eval/tests/ui/postgres/filter/data_island_provides_without_lateral.snap @@ -4,14 +4,14 @@ expression: query_report.to_string() --- ===================================== SQL ====================================== -SELECT "entity_editions_0_0_1"."properties" AS "properties", jsonb_build_object($2, jsonb_build_object($3, "entity_temporal_metadata_0_0_0"."web_id", $4, "entity_temporal_metadata_0_0_0"."entity_uuid", $5, "entity_temporal_metadata_0_0_0"."draft_id"), $5, "entity_temporal_metadata_0_0_0"."draft_id") AS "record_id", jsonb_build_object($6, "entity_temporal_metadata_0_0_0"."decision_time", $7, "entity_temporal_metadata_0_0_0"."transaction_time") AS "temporal_versioning", "entity_is_of_type_ids_0_0_2"."entity_type_ids" AS "entity_type_ids", "entity_editions_0_0_1"."archived" AS "archived", "entity_editions_0_0_1"."confidence" AS "confidence", "entity_ids_0_0_3"."provenance" AS "provenance_inferred", "entity_editions_0_0_1"."provenance" AS 
"provenance_edition", "entity_editions_0_0_1"."property_metadata" AS "property_metadata", "entity_has_left_entity_0_0_4"."left_web_id" AS "left_entity_web_id", "entity_has_left_entity_0_0_4"."left_entity_uuid" AS "left_entity_uuid", "entity_has_right_entity_0_0_5"."right_web_id" AS "right_entity_web_id", "entity_has_right_entity_0_0_5"."right_entity_uuid" AS "right_entity_uuid", "entity_has_left_entity_0_0_4"."confidence" AS "left_entity_confidence", "entity_has_right_entity_0_0_5"."confidence" AS "right_entity_confidence", "entity_has_left_entity_0_0_4"."provenance" AS "left_entity_provenance", "entity_has_right_entity_0_0_5"."provenance" AS "right_entity_provenance" +SELECT "entity_editions_0_0_1"."properties" AS "properties", jsonb_build_object($3, jsonb_build_object($4, "entity_temporal_metadata_0_0_0"."web_id", $5, "entity_temporal_metadata_0_0_0"."entity_uuid", $6, "entity_temporal_metadata_0_0_0"."draft_id"), $6, "entity_temporal_metadata_0_0_0"."draft_id") AS "record_id", jsonb_build_object($7, "entity_temporal_metadata_0_0_0"."decision_time", $8, "entity_temporal_metadata_0_0_0"."transaction_time") AS "temporal_versioning", "entity_is_of_type_ids_0_0_2"."entity_type_ids" AS "entity_type_ids", "entity_editions_0_0_1"."archived" AS "archived", "entity_editions_0_0_1"."confidence" AS "confidence", "entity_ids_0_0_3"."provenance" AS "provenance_inferred", "entity_editions_0_0_1"."provenance" AS "provenance_edition", "entity_editions_0_0_1"."property_metadata" AS "property_metadata", "entity_has_left_entity_0_0_4"."left_web_id" AS "left_entity_web_id", "entity_has_left_entity_0_0_4"."left_entity_uuid" AS "left_entity_uuid", "entity_has_right_entity_0_0_5"."right_web_id" AS "right_entity_web_id", "entity_has_right_entity_0_0_5"."right_entity_uuid" AS "right_entity_uuid", "entity_has_left_entity_0_0_4"."confidence" AS "left_entity_confidence", "entity_has_right_entity_0_0_5"."confidence" AS "right_entity_confidence", "entity_has_left_entity_0_0_4"."provenance" AS 
"left_entity_provenance", "entity_has_right_entity_0_0_5"."provenance" AS "right_entity_provenance" FROM "entity_temporal_metadata" AS "entity_temporal_metadata_0_0_0" INNER JOIN "entity_editions" AS "entity_editions_0_0_1" ON "entity_editions_0_0_1"."entity_edition_id" = "entity_temporal_metadata_0_0_0"."entity_edition_id" INNER JOIN "entity_ids" AS "entity_ids_0_0_3" ON "entity_ids_0_0_3"."web_id" = "entity_temporal_metadata_0_0_0"."web_id" AND "entity_ids_0_0_3"."entity_uuid" = "entity_temporal_metadata_0_0_0"."entity_uuid" -LEFT OUTER JOIN LATERAL (SELECT jsonb_agg(jsonb_build_object($8, "b", $9, "v")) AS "entity_type_ids" +LEFT OUTER JOIN LATERAL (SELECT jsonb_agg(jsonb_build_object($9, "b", $10, "v")) AS "entity_type_ids" FROM "entity_is_of_type_ids" AS "eit" CROSS JOIN UNNEST("eit"."base_urls", "eit"."versions") AS "u"("b", "v") WHERE "eit"."entity_edition_id" = "entity_temporal_metadata_0_0_0"."entity_edition_id") AS "entity_is_of_type_ids_0_0_2" @@ -22,16 +22,16 @@ LEFT OUTER JOIN "entity_has_left_entity" AS "entity_has_left_entity_0_0_4" LEFT OUTER JOIN "entity_has_right_entity" AS "entity_has_right_entity_0_0_5" ON "entity_has_right_entity_0_0_5"."web_id" = "entity_temporal_metadata_0_0_0"."web_id" AND "entity_has_right_entity_0_0_5"."entity_uuid" = "entity_temporal_metadata_0_0_0"."entity_uuid" -WHERE "entity_temporal_metadata_0_0_0"."transaction_time" && $0 AND "entity_temporal_metadata_0_0_0"."decision_time" && $1 +WHERE "entity_temporal_metadata_0_0_0"."transaction_time" && $1 AND "entity_temporal_metadata_0_0_0"."decision_time" && $2 ================================== Parameters ================================== -$0: TemporalAxis(Transaction) -$1: TemporalAxis(Decision) -$2: Symbol(entity_id) -$3: Symbol(web_id) -$4: Symbol(entity_uuid) -$5: Symbol(draft_id) -$6: Symbol(decision_time) -$7: Symbol(transaction_time) -$8: Symbol(base_url) -$9: Symbol(version) +$1: TemporalAxis(Transaction) +$2: TemporalAxis(Decision) +$3: Symbol(entity_id) +$4: 
Symbol(web_id) +$5: Symbol(entity_uuid) +$6: Symbol(draft_id) +$7: Symbol(decision_time) +$8: Symbol(transaction_time) +$9: Symbol(base_url) +$10: Symbol(version) diff --git a/libs/@local/hashql/eval/tests/ui/postgres/filter/diamond_cfg_merge.snap b/libs/@local/hashql/eval/tests/ui/postgres/filter/diamond_cfg_merge.snap index 94f52bcc981..0fdc8b8332b 100644 --- a/libs/@local/hashql/eval/tests/ui/postgres/filter/diamond_cfg_merge.snap +++ b/libs/@local/hashql/eval/tests/ui/postgres/filter/diamond_cfg_merge.snap @@ -4,4 +4,4 @@ expression: report.to_string() --- ==================== Island (entry: bb0, target: postgres) ===================== -CASE WHEN (($0)::int) = 0 THEN (ROW(((0)::boolean), NULL, NULL, NULL)::continuation) WHEN (($0)::int) = 1 THEN (ROW(((1)::boolean), NULL, NULL, NULL)::continuation) END +CASE WHEN (($1)::int) = 0 THEN (ROW(((0)::boolean), NULL, NULL, NULL)::continuation) WHEN (($1)::int) = 1 THEN (ROW(((1)::boolean), NULL, NULL, NULL)::continuation) END diff --git a/libs/@local/hashql/eval/tests/ui/postgres/filter/dynamic_index_projection.snap b/libs/@local/hashql/eval/tests/ui/postgres/filter/dynamic_index_projection.snap index 74197698106..3ae7a2f88c4 100644 --- a/libs/@local/hashql/eval/tests/ui/postgres/filter/dynamic_index_projection.snap +++ b/libs/@local/hashql/eval/tests/ui/postgres/filter/dynamic_index_projection.snap @@ -4,4 +4,4 @@ expression: report.to_string() --- ==================== Island (entry: bb0, target: postgres) ===================== -(ROW(((jsonb_extract_path(jsonb_build_array(10, 20, 30), (($0)::text)))::boolean), NULL, NULL, NULL)::continuation) +(ROW(((jsonb_extract_path(jsonb_build_array(10, 20, 30), (($1)::text)))::boolean), NULL, NULL, NULL)::continuation) diff --git a/libs/@local/hashql/eval/tests/ui/postgres/filter/field_by_name_projection.snap b/libs/@local/hashql/eval/tests/ui/postgres/filter/field_by_name_projection.snap index 81ebb1c3773..29f95402f4b 100644 --- 
a/libs/@local/hashql/eval/tests/ui/postgres/filter/field_by_name_projection.snap +++ b/libs/@local/hashql/eval/tests/ui/postgres/filter/field_by_name_projection.snap @@ -4,4 +4,4 @@ expression: report.to_string() --- ==================== Island (entry: bb0, target: postgres) ===================== -(ROW(((jsonb_extract_path(jsonb_build_object($0, 10, $1, 20), (($0)::text)))::boolean), NULL, NULL, NULL)::continuation) +(ROW(((jsonb_extract_path(jsonb_build_object($1, 10, $2, 20), (($1)::text)))::boolean), NULL, NULL, NULL)::continuation) diff --git a/libs/@local/hashql/eval/tests/ui/postgres/filter/island_exit_goto.snap b/libs/@local/hashql/eval/tests/ui/postgres/filter/island_exit_goto.snap index f9d10c3670b..cac5ac55385 100644 --- a/libs/@local/hashql/eval/tests/ui/postgres/filter/island_exit_goto.snap +++ b/libs/@local/hashql/eval/tests/ui/postgres/filter/island_exit_goto.snap @@ -4,4 +4,4 @@ expression: report.to_string() --- ==================== Island (entry: bb0, target: postgres) ===================== -(ROW(NULL, 1, ARRAY[8]::int[], ARRAY[jsonb_build_object($0, jsonb_build_object($1, "entity_temporal_metadata_0_0_0"."web_id", $2, "entity_temporal_metadata_0_0_0"."entity_uuid", $3, "entity_temporal_metadata_0_0_0"."draft_id"), $3, "entity_temporal_metadata_0_0_0"."draft_id")]::jsonb[])::continuation) +(ROW(NULL, 1, ARRAY[8]::int[], ARRAY[jsonb_build_object($1, jsonb_build_object($2, "entity_temporal_metadata_0_0_0"."web_id", $3, "entity_temporal_metadata_0_0_0"."entity_uuid", $4, "entity_temporal_metadata_0_0_0"."draft_id"), $4, "entity_temporal_metadata_0_0_0"."draft_id")]::jsonb[])::continuation) diff --git a/libs/@local/hashql/eval/tests/ui/postgres/filter/island_exit_switch_int.snap b/libs/@local/hashql/eval/tests/ui/postgres/filter/island_exit_switch_int.snap index 3d7dbed0692..ce9f2c2a23f 100644 --- a/libs/@local/hashql/eval/tests/ui/postgres/filter/island_exit_switch_int.snap +++ 
b/libs/@local/hashql/eval/tests/ui/postgres/filter/island_exit_switch_int.snap @@ -4,4 +4,4 @@ expression: report.to_string() --- ==================== Island (entry: bb0, target: postgres) ===================== -CASE WHEN (($6)::int) = 0 THEN (ROW(NULL, 2, ARRAY[]::int[], ARRAY[]::jsonb[])::continuation) WHEN (($6)::int) = 1 THEN (ROW(((1)::boolean), NULL, NULL, NULL)::continuation) END +CASE WHEN (($7)::int) = 0 THEN (ROW(NULL, 2, ARRAY[]::int[], ARRAY[]::jsonb[])::continuation) WHEN (($7)::int) = 1 THEN (ROW(((1)::boolean), NULL, NULL, NULL)::continuation) END diff --git a/libs/@local/hashql/eval/tests/ui/postgres/filter/island_exit_with_live_out.snap b/libs/@local/hashql/eval/tests/ui/postgres/filter/island_exit_with_live_out.snap index 4a054e73d70..e0da571eacc 100644 --- a/libs/@local/hashql/eval/tests/ui/postgres/filter/island_exit_with_live_out.snap +++ b/libs/@local/hashql/eval/tests/ui/postgres/filter/island_exit_with_live_out.snap @@ -4,4 +4,4 @@ expression: report.to_string() --- ==================== Island (entry: bb0, target: postgres) ===================== -(ROW(NULL, 1, ARRAY[7]::int[], ARRAY[jsonb_build_object($0, jsonb_build_object($1, "entity_temporal_metadata_0_0_0"."web_id", $2, "entity_temporal_metadata_0_0_0"."entity_uuid", $3, "entity_temporal_metadata_0_0_0"."draft_id"), $3, "entity_temporal_metadata_0_0_0"."draft_id")]::jsonb[])::continuation) +(ROW(NULL, 1, ARRAY[7]::int[], ARRAY[jsonb_build_object($1, jsonb_build_object($2, "entity_temporal_metadata_0_0_0"."web_id", $3, "entity_temporal_metadata_0_0_0"."entity_uuid", $4, "entity_temporal_metadata_0_0_0"."draft_id"), $4, "entity_temporal_metadata_0_0_0"."draft_id")]::jsonb[])::continuation) diff --git a/libs/@local/hashql/eval/tests/ui/postgres/filter/left_entity_filter.snap b/libs/@local/hashql/eval/tests/ui/postgres/filter/left_entity_filter.snap index 5ae77a37836..f67d27c8a61 100644 --- a/libs/@local/hashql/eval/tests/ui/postgres/filter/left_entity_filter.snap +++ 
b/libs/@local/hashql/eval/tests/ui/postgres/filter/left_entity_filter.snap @@ -4,4 +4,4 @@ expression: report.to_string() --- ==================== Island (entry: bb0, target: postgres) ===================== -(ROW((("entity_has_left_entity_0_0_1"."left_entity_uuid" = $0)::boolean), NULL, NULL, NULL)::continuation) +(ROW((("entity_has_left_entity_0_0_1"."left_entity_uuid" = $1)::boolean), NULL, NULL, NULL)::continuation) diff --git a/libs/@local/hashql/eval/tests/ui/postgres/filter/nested_property_access.snap b/libs/@local/hashql/eval/tests/ui/postgres/filter/nested_property_access.snap index 7f13cbd8fe0..5df5bbbed9f 100644 --- a/libs/@local/hashql/eval/tests/ui/postgres/filter/nested_property_access.snap +++ b/libs/@local/hashql/eval/tests/ui/postgres/filter/nested_property_access.snap @@ -4,4 +4,4 @@ expression: report.to_string() --- ==================== Island (entry: bb0, target: postgres) ===================== -(ROW(((jsonb_extract_path("entity_editions_0_0_1"."properties", (($0)::text), (($1)::text)) = $2)::boolean), NULL, NULL, NULL)::continuation) +(ROW(((jsonb_extract_path("entity_editions_0_0_1"."properties", (($1)::text), (($2)::text)) = $3)::boolean), NULL, NULL, NULL)::continuation) diff --git a/libs/@local/hashql/eval/tests/ui/postgres/filter/property_field_equality.snap b/libs/@local/hashql/eval/tests/ui/postgres/filter/property_field_equality.snap index 287afa325a8..b0a43648597 100644 --- a/libs/@local/hashql/eval/tests/ui/postgres/filter/property_field_equality.snap +++ b/libs/@local/hashql/eval/tests/ui/postgres/filter/property_field_equality.snap @@ -4,4 +4,4 @@ expression: report.to_string() --- ==================== Island (entry: bb0, target: postgres) ===================== -(ROW(((jsonb_extract_path("entity_editions_0_0_1"."properties", (($0)::text)) = $1)::boolean), NULL, NULL, NULL)::continuation) +(ROW(((jsonb_extract_path("entity_editions_0_0_1"."properties", (($1)::text)) = $2)::boolean), NULL, NULL, NULL)::continuation) diff --git 
a/libs/@local/hashql/eval/tests/ui/postgres/filter/property_mask.snap b/libs/@local/hashql/eval/tests/ui/postgres/filter/property_mask.snap index fd7c02275f8..5b401378fc2 100644 --- a/libs/@local/hashql/eval/tests/ui/postgres/filter/property_mask.snap +++ b/libs/@local/hashql/eval/tests/ui/postgres/filter/property_mask.snap @@ -4,14 +4,14 @@ expression: report.to_string() --- ===================================== SQL ====================================== -SELECT ("entity_editions_0_0_1"."properties" - $99) AS "properties", jsonb_build_object($2, jsonb_build_object($3, "entity_temporal_metadata_0_0_0"."web_id", $4, "entity_temporal_metadata_0_0_0"."entity_uuid", $5, "entity_temporal_metadata_0_0_0"."draft_id"), $5, "entity_temporal_metadata_0_0_0"."draft_id") AS "record_id", jsonb_build_object($6, "entity_temporal_metadata_0_0_0"."decision_time", $7, "entity_temporal_metadata_0_0_0"."transaction_time") AS "temporal_versioning", "entity_is_of_type_ids_0_0_2"."entity_type_ids" AS "entity_type_ids", "entity_editions_0_0_1"."archived" AS "archived", "entity_editions_0_0_1"."confidence" AS "confidence", "entity_ids_0_0_3"."provenance" AS "provenance_inferred", "entity_editions_0_0_1"."provenance" AS "provenance_edition", ("entity_editions_0_0_1"."property_metadata" - $99) AS "property_metadata", "entity_has_left_entity_0_0_4"."left_web_id" AS "left_entity_web_id", "entity_has_left_entity_0_0_4"."left_entity_uuid" AS "left_entity_uuid", "entity_has_right_entity_0_0_5"."right_web_id" AS "right_entity_web_id", "entity_has_right_entity_0_0_5"."right_entity_uuid" AS "right_entity_uuid", "entity_has_left_entity_0_0_4"."confidence" AS "left_entity_confidence", "entity_has_right_entity_0_0_5"."confidence" AS "right_entity_confidence", "entity_has_left_entity_0_0_4"."provenance" AS "left_entity_provenance", "entity_has_right_entity_0_0_5"."provenance" AS "right_entity_provenance", ("continuation_0_0"."row")."block" AS "continuation_0_0_block", ("continuation_0_0"."row")."locals" 
AS "continuation_0_0_locals", ("continuation_0_0"."row")."values" AS "continuation_0_0_values" +SELECT ("entity_editions_0_0_1"."properties" - $99) AS "properties", jsonb_build_object($3, jsonb_build_object($4, "entity_temporal_metadata_0_0_0"."web_id", $5, "entity_temporal_metadata_0_0_0"."entity_uuid", $6, "entity_temporal_metadata_0_0_0"."draft_id"), $6, "entity_temporal_metadata_0_0_0"."draft_id") AS "record_id", jsonb_build_object($7, "entity_temporal_metadata_0_0_0"."decision_time", $8, "entity_temporal_metadata_0_0_0"."transaction_time") AS "temporal_versioning", "entity_is_of_type_ids_0_0_2"."entity_type_ids" AS "entity_type_ids", "entity_editions_0_0_1"."archived" AS "archived", "entity_editions_0_0_1"."confidence" AS "confidence", "entity_ids_0_0_3"."provenance" AS "provenance_inferred", "entity_editions_0_0_1"."provenance" AS "provenance_edition", ("entity_editions_0_0_1"."property_metadata" - $99) AS "property_metadata", "entity_has_left_entity_0_0_4"."left_web_id" AS "left_entity_web_id", "entity_has_left_entity_0_0_4"."left_entity_uuid" AS "left_entity_uuid", "entity_has_right_entity_0_0_5"."right_web_id" AS "right_entity_web_id", "entity_has_right_entity_0_0_5"."right_entity_uuid" AS "right_entity_uuid", "entity_has_left_entity_0_0_4"."confidence" AS "left_entity_confidence", "entity_has_right_entity_0_0_5"."confidence" AS "right_entity_confidence", "entity_has_left_entity_0_0_4"."provenance" AS "left_entity_provenance", "entity_has_right_entity_0_0_5"."provenance" AS "right_entity_provenance", ("continuation_0_0"."row")."block" AS "continuation_0_0_block", ("continuation_0_0"."row")."locals" AS "continuation_0_0_locals", ("continuation_0_0"."row")."values" AS "continuation_0_0_values" FROM "entity_temporal_metadata" AS "entity_temporal_metadata_0_0_0" INNER JOIN "entity_editions" AS "entity_editions_0_0_1" ON "entity_editions_0_0_1"."entity_edition_id" = "entity_temporal_metadata_0_0_0"."entity_edition_id" INNER JOIN "entity_ids" AS 
"entity_ids_0_0_3" ON "entity_ids_0_0_3"."web_id" = "entity_temporal_metadata_0_0_0"."web_id" AND "entity_ids_0_0_3"."entity_uuid" = "entity_temporal_metadata_0_0_0"."entity_uuid" -LEFT OUTER JOIN LATERAL (SELECT jsonb_agg(jsonb_build_object($8, "b", $9, "v")) AS "entity_type_ids" +LEFT OUTER JOIN LATERAL (SELECT jsonb_agg(jsonb_build_object($9, "b", $10, "v")) AS "entity_type_ids" FROM "entity_is_of_type_ids" AS "eit" CROSS JOIN UNNEST("eit"."base_urls", "eit"."versions") AS "u"("b", "v") WHERE "eit"."entity_edition_id" = "entity_temporal_metadata_0_0_0"."entity_edition_id") AS "entity_is_of_type_ids_0_0_2" @@ -24,16 +24,16 @@ LEFT OUTER JOIN "entity_has_right_entity" AS "entity_has_right_entity_0_0_5" AND "entity_has_right_entity_0_0_5"."entity_uuid" = "entity_temporal_metadata_0_0_0"."entity_uuid" CROSS JOIN LATERAL (SELECT (ROW(NULL, 1, ARRAY[]::int[], ARRAY[]::jsonb[])::continuation) AS "row" OFFSET 0) AS "continuation_0_0" -WHERE "entity_temporal_metadata_0_0_0"."transaction_time" && $0 AND "entity_temporal_metadata_0_0_0"."decision_time" && $1 AND ("continuation_0_0"."row")."filter" IS NOT FALSE +WHERE "entity_temporal_metadata_0_0_0"."transaction_time" && $1 AND "entity_temporal_metadata_0_0_0"."decision_time" && $2 AND ("continuation_0_0"."row")."filter" IS NOT FALSE ================================== Parameters ================================== -$0: TemporalAxis(Transaction) -$1: TemporalAxis(Decision) -$2: Symbol(entity_id) -$3: Symbol(web_id) -$4: Symbol(entity_uuid) -$5: Symbol(draft_id) -$6: Symbol(decision_time) -$7: Symbol(transaction_time) -$8: Symbol(base_url) -$9: Symbol(version) +$1: TemporalAxis(Transaction) +$2: TemporalAxis(Decision) +$3: Symbol(entity_id) +$4: Symbol(web_id) +$5: Symbol(entity_uuid) +$6: Symbol(draft_id) +$7: Symbol(decision_time) +$8: Symbol(transaction_time) +$9: Symbol(base_url) +$10: Symbol(version) diff --git a/libs/@local/hashql/eval/tests/ui/postgres/filter/provides_drives_select_and_joins.snap 
b/libs/@local/hashql/eval/tests/ui/postgres/filter/provides_drives_select_and_joins.snap index 81c22678cea..f41041674b5 100644 --- a/libs/@local/hashql/eval/tests/ui/postgres/filter/provides_drives_select_and_joins.snap +++ b/libs/@local/hashql/eval/tests/ui/postgres/filter/provides_drives_select_and_joins.snap @@ -4,14 +4,14 @@ expression: report.to_string() --- ===================================== SQL ====================================== -SELECT "entity_editions_0_0_1"."properties" AS "properties", jsonb_build_object($2, jsonb_build_object($3, "entity_temporal_metadata_0_0_0"."web_id", $4, "entity_temporal_metadata_0_0_0"."entity_uuid", $5, "entity_temporal_metadata_0_0_0"."draft_id"), $5, "entity_temporal_metadata_0_0_0"."draft_id") AS "record_id", jsonb_build_object($6, "entity_temporal_metadata_0_0_0"."decision_time", $7, "entity_temporal_metadata_0_0_0"."transaction_time") AS "temporal_versioning", "entity_is_of_type_ids_0_0_2"."entity_type_ids" AS "entity_type_ids", "entity_editions_0_0_1"."archived" AS "archived", "entity_editions_0_0_1"."confidence" AS "confidence", "entity_ids_0_0_3"."provenance" AS "provenance_inferred", "entity_editions_0_0_1"."provenance" AS "provenance_edition", "entity_editions_0_0_1"."property_metadata" AS "property_metadata", "entity_has_left_entity_0_0_4"."left_web_id" AS "left_entity_web_id", "entity_has_left_entity_0_0_4"."left_entity_uuid" AS "left_entity_uuid", "entity_has_right_entity_0_0_5"."right_web_id" AS "right_entity_web_id", "entity_has_right_entity_0_0_5"."right_entity_uuid" AS "right_entity_uuid", "entity_has_left_entity_0_0_4"."confidence" AS "left_entity_confidence", "entity_has_right_entity_0_0_5"."confidence" AS "right_entity_confidence", "entity_has_left_entity_0_0_4"."provenance" AS "left_entity_provenance", "entity_has_right_entity_0_0_5"."provenance" AS "right_entity_provenance" +SELECT "entity_editions_0_0_1"."properties" AS "properties", jsonb_build_object($3, jsonb_build_object($4, 
"entity_temporal_metadata_0_0_0"."web_id", $5, "entity_temporal_metadata_0_0_0"."entity_uuid", $6, "entity_temporal_metadata_0_0_0"."draft_id"), $6, "entity_temporal_metadata_0_0_0"."draft_id") AS "record_id", jsonb_build_object($7, "entity_temporal_metadata_0_0_0"."decision_time", $8, "entity_temporal_metadata_0_0_0"."transaction_time") AS "temporal_versioning", "entity_is_of_type_ids_0_0_2"."entity_type_ids" AS "entity_type_ids", "entity_editions_0_0_1"."archived" AS "archived", "entity_editions_0_0_1"."confidence" AS "confidence", "entity_ids_0_0_3"."provenance" AS "provenance_inferred", "entity_editions_0_0_1"."provenance" AS "provenance_edition", "entity_editions_0_0_1"."property_metadata" AS "property_metadata", "entity_has_left_entity_0_0_4"."left_web_id" AS "left_entity_web_id", "entity_has_left_entity_0_0_4"."left_entity_uuid" AS "left_entity_uuid", "entity_has_right_entity_0_0_5"."right_web_id" AS "right_entity_web_id", "entity_has_right_entity_0_0_5"."right_entity_uuid" AS "right_entity_uuid", "entity_has_left_entity_0_0_4"."confidence" AS "left_entity_confidence", "entity_has_right_entity_0_0_5"."confidence" AS "right_entity_confidence", "entity_has_left_entity_0_0_4"."provenance" AS "left_entity_provenance", "entity_has_right_entity_0_0_5"."provenance" AS "right_entity_provenance" FROM "entity_temporal_metadata" AS "entity_temporal_metadata_0_0_0" INNER JOIN "entity_editions" AS "entity_editions_0_0_1" ON "entity_editions_0_0_1"."entity_edition_id" = "entity_temporal_metadata_0_0_0"."entity_edition_id" INNER JOIN "entity_ids" AS "entity_ids_0_0_3" ON "entity_ids_0_0_3"."web_id" = "entity_temporal_metadata_0_0_0"."web_id" AND "entity_ids_0_0_3"."entity_uuid" = "entity_temporal_metadata_0_0_0"."entity_uuid" -LEFT OUTER JOIN LATERAL (SELECT jsonb_agg(jsonb_build_object($8, "b", $9, "v")) AS "entity_type_ids" +LEFT OUTER JOIN LATERAL (SELECT jsonb_agg(jsonb_build_object($9, "b", $10, "v")) AS "entity_type_ids" FROM "entity_is_of_type_ids" AS "eit" CROSS 
JOIN UNNEST("eit"."base_urls", "eit"."versions") AS "u"("b", "v") WHERE "eit"."entity_edition_id" = "entity_temporal_metadata_0_0_0"."entity_edition_id") AS "entity_is_of_type_ids_0_0_2" @@ -22,16 +22,16 @@ LEFT OUTER JOIN "entity_has_left_entity" AS "entity_has_left_entity_0_0_4" LEFT OUTER JOIN "entity_has_right_entity" AS "entity_has_right_entity_0_0_5" ON "entity_has_right_entity_0_0_5"."web_id" = "entity_temporal_metadata_0_0_0"."web_id" AND "entity_has_right_entity_0_0_5"."entity_uuid" = "entity_temporal_metadata_0_0_0"."entity_uuid" -WHERE "entity_temporal_metadata_0_0_0"."transaction_time" && $0 AND "entity_temporal_metadata_0_0_0"."decision_time" && $1 +WHERE "entity_temporal_metadata_0_0_0"."transaction_time" && $1 AND "entity_temporal_metadata_0_0_0"."decision_time" && $2 ================================== Parameters ================================== -$0: TemporalAxis(Transaction) -$1: TemporalAxis(Decision) -$2: Symbol(entity_id) -$3: Symbol(web_id) -$4: Symbol(entity_uuid) -$5: Symbol(draft_id) -$6: Symbol(decision_time) -$7: Symbol(transaction_time) -$8: Symbol(base_url) -$9: Symbol(version) +$1: TemporalAxis(Transaction) +$2: TemporalAxis(Decision) +$3: Symbol(entity_id) +$4: Symbol(web_id) +$5: Symbol(entity_uuid) +$6: Symbol(draft_id) +$7: Symbol(decision_time) +$8: Symbol(transaction_time) +$9: Symbol(base_url) +$10: Symbol(version) diff --git a/libs/@local/hashql/eval/tests/ui/postgres/filter/straight_line_goto_chain.snap b/libs/@local/hashql/eval/tests/ui/postgres/filter/straight_line_goto_chain.snap index fee48eda83e..6b65928d0dd 100644 --- a/libs/@local/hashql/eval/tests/ui/postgres/filter/straight_line_goto_chain.snap +++ b/libs/@local/hashql/eval/tests/ui/postgres/filter/straight_line_goto_chain.snap @@ -4,4 +4,4 @@ expression: report.to_string() --- ==================== Island (entry: bb0, target: postgres) ===================== -(ROW((($0)::boolean), NULL, NULL, NULL)::continuation) +(ROW((($1)::boolean), NULL, NULL, NULL)::continuation) 
diff --git a/libs/@local/hashql/eval/tests/ui/postgres/filter/switch_int_many_branches.snap b/libs/@local/hashql/eval/tests/ui/postgres/filter/switch_int_many_branches.snap index 8444de9287a..614ce12afeb 100644 --- a/libs/@local/hashql/eval/tests/ui/postgres/filter/switch_int_many_branches.snap +++ b/libs/@local/hashql/eval/tests/ui/postgres/filter/switch_int_many_branches.snap @@ -4,4 +4,4 @@ expression: report.to_string() --- ==================== Island (entry: bb0, target: postgres) ===================== -CASE WHEN (($0)::int) = 0 THEN (ROW(((1)::boolean), NULL, NULL, NULL)::continuation) WHEN (($0)::int) = 1 THEN (ROW(((0)::boolean), NULL, NULL, NULL)::continuation) WHEN (($0)::int) = 2 THEN (ROW(((1)::boolean), NULL, NULL, NULL)::continuation) WHEN (($0)::int) = 3 THEN (ROW(((0)::boolean), NULL, NULL, NULL)::continuation) ELSE (ROW(((1)::boolean), NULL, NULL, NULL)::continuation) END +CASE WHEN (($1)::int) = 0 THEN (ROW(((1)::boolean), NULL, NULL, NULL)::continuation) WHEN (($1)::int) = 1 THEN (ROW(((0)::boolean), NULL, NULL, NULL)::continuation) WHEN (($1)::int) = 2 THEN (ROW(((1)::boolean), NULL, NULL, NULL)::continuation) WHEN (($1)::int) = 3 THEN (ROW(((0)::boolean), NULL, NULL, NULL)::continuation) ELSE (ROW(((1)::boolean), NULL, NULL, NULL)::continuation) END diff --git a/libs/@local/hashql/eval/tests/ui/postgres/filter/unary_bitnot.snap b/libs/@local/hashql/eval/tests/ui/postgres/filter/unary_bitnot.snap index e742bdb3940..908979c8712 100644 --- a/libs/@local/hashql/eval/tests/ui/postgres/filter/unary_bitnot.snap +++ b/libs/@local/hashql/eval/tests/ui/postgres/filter/unary_bitnot.snap @@ -4,4 +4,4 @@ expression: report.to_string() --- ==================== Island (entry: bb0, target: postgres) ===================== -(ROW(((~($0))::boolean), NULL, NULL, NULL)::continuation) +(ROW(((~($1))::boolean), NULL, NULL, NULL)::continuation) diff --git a/libs/@local/hashql/eval/tests/ui/postgres/filter/unary_neg.snap 
b/libs/@local/hashql/eval/tests/ui/postgres/filter/unary_neg.snap index abaca23ebf9..6a5f3c81199 100644 --- a/libs/@local/hashql/eval/tests/ui/postgres/filter/unary_neg.snap +++ b/libs/@local/hashql/eval/tests/ui/postgres/filter/unary_neg.snap @@ -4,4 +4,4 @@ expression: report.to_string() --- ==================== Island (entry: bb0, target: postgres) ===================== -(ROW(((-($0))::boolean), NULL, NULL, NULL)::continuation) +(ROW(((-($1))::boolean), NULL, NULL, NULL)::continuation) diff --git a/libs/@local/hashql/eval/tests/ui/postgres/filter/unary_not.snap b/libs/@local/hashql/eval/tests/ui/postgres/filter/unary_not.snap index 8ce8c480b00..a06e9a75e62 100644 --- a/libs/@local/hashql/eval/tests/ui/postgres/filter/unary_not.snap +++ b/libs/@local/hashql/eval/tests/ui/postgres/filter/unary_not.snap @@ -4,4 +4,4 @@ expression: report.to_string() --- ==================== Island (entry: bb0, target: postgres) ===================== -(ROW(((NOT($0))::boolean), NULL, NULL, NULL)::continuation) +(ROW(((NOT($1))::boolean), NULL, NULL, NULL)::continuation) diff --git a/libs/@local/hashql/eval/tests/ui/postgres/if-input-branches.stdout b/libs/@local/hashql/eval/tests/ui/postgres/if-input-branches.stdout index eb21a3b7170..6c87d1f0131 100644 --- a/libs/@local/hashql/eval/tests/ui/postgres/if-input-branches.stdout +++ b/libs/@local/hashql/eval/tests/ui/postgres/if-input-branches.stdout @@ -2,14 +2,14 @@ SELECT ("continuation_2_0"."row")."block" AS "continuation_2_0_block", ("continuation_2_0"."row")."locals" AS "continuation_2_0_locals", ("continuation_2_0"."row")."values" AS "continuation_2_0_values" FROM "entity_temporal_metadata" AS "entity_temporal_metadata_0_0_0" -CROSS JOIN LATERAL (SELECT CASE WHEN (($2)::int) = 0 THEN (ROW((("entity_temporal_metadata_0_0_0"."entity_uuid" = $3)::boolean), NULL, NULL, NULL)::continuation) WHEN (($2)::int) = 1 THEN (ROW((("entity_temporal_metadata_0_0_0"."entity_uuid" = $4)::boolean), NULL, NULL, NULL)::continuation) END AS "row" 
+CROSS JOIN LATERAL (SELECT CASE WHEN (($3)::int) = 0 THEN (ROW((("entity_temporal_metadata_0_0_0"."entity_uuid" = $4)::boolean), NULL, NULL, NULL)::continuation) WHEN (($3)::int) = 1 THEN (ROW((("entity_temporal_metadata_0_0_0"."entity_uuid" = $5)::boolean), NULL, NULL, NULL)::continuation) END AS "row" OFFSET 0) AS "continuation_2_0" -WHERE "entity_temporal_metadata_0_0_0"."transaction_time" && $0 AND "entity_temporal_metadata_0_0_0"."decision_time" && $1 AND ("continuation_2_0"."row")."filter" IS NOT FALSE +WHERE "entity_temporal_metadata_0_0_0"."transaction_time" && $1 AND "entity_temporal_metadata_0_0_0"."decision_time" && $2 AND ("continuation_2_0"."row")."filter" IS NOT FALSE ════ Parameters ════════════════════════════════════════════════════════════════ -$0: TemporalAxis(Transaction) -$1: TemporalAxis(Decision) -$2: Input(flag) -$3: Input(id_b) -$4: Input(id_a) +$1: TemporalAxis(Transaction) +$2: TemporalAxis(Decision) +$3: Input(flag) +$4: Input(id_b) +$5: Input(id_a) diff --git a/libs/@local/hashql/eval/tests/ui/postgres/input-parameter-exists.stdout b/libs/@local/hashql/eval/tests/ui/postgres/input-parameter-exists.stdout index 608d4990518..148c2f48505 100644 --- a/libs/@local/hashql/eval/tests/ui/postgres/input-parameter-exists.stdout +++ b/libs/@local/hashql/eval/tests/ui/postgres/input-parameter-exists.stdout @@ -2,12 +2,12 @@ SELECT ("continuation_3_0"."row")."block" AS "continuation_3_0_block", ("continuation_3_0"."row")."locals" AS "continuation_3_0_locals", ("continuation_3_0"."row")."values" AS "continuation_3_0_values" FROM "entity_temporal_metadata" AS "entity_temporal_metadata_0_0_0" -CROSS JOIN LATERAL (SELECT CASE WHEN (($2 IS NOT NULL)::int) = 0 THEN (ROW(((1)::boolean), NULL, NULL, NULL)::continuation) WHEN (($2 IS NOT NULL)::int) = 1 THEN (ROW((($2)::boolean), NULL, NULL, NULL)::continuation) END AS "row" +CROSS JOIN LATERAL (SELECT CASE WHEN (($3 IS NOT NULL)::int) = 0 THEN (ROW(((1)::boolean), NULL, NULL, NULL)::continuation) WHEN (($3 
IS NOT NULL)::int) = 1 THEN (ROW((($3)::boolean), NULL, NULL, NULL)::continuation) END AS "row" OFFSET 0) AS "continuation_3_0" -WHERE "entity_temporal_metadata_0_0_0"."transaction_time" && $0 AND "entity_temporal_metadata_0_0_0"."decision_time" && $1 AND ("continuation_3_0"."row")."filter" IS NOT FALSE +WHERE "entity_temporal_metadata_0_0_0"."transaction_time" && $1 AND "entity_temporal_metadata_0_0_0"."decision_time" && $2 AND ("continuation_3_0"."row")."filter" IS NOT FALSE ════ Parameters ════════════════════════════════════════════════════════════════ -$0: TemporalAxis(Transaction) -$1: TemporalAxis(Decision) -$2: Input(optional_flag) +$1: TemporalAxis(Transaction) +$2: TemporalAxis(Decision) +$3: Input(optional_flag) diff --git a/libs/@local/hashql/eval/tests/ui/postgres/input-parameter-load.stdout b/libs/@local/hashql/eval/tests/ui/postgres/input-parameter-load.stdout index 01ab6aa78be..bd0a1866e48 100644 --- a/libs/@local/hashql/eval/tests/ui/postgres/input-parameter-load.stdout +++ b/libs/@local/hashql/eval/tests/ui/postgres/input-parameter-load.stdout @@ -2,12 +2,12 @@ SELECT ("continuation_2_0"."row")."block" AS "continuation_2_0_block", ("continuation_2_0"."row")."locals" AS "continuation_2_0_locals", ("continuation_2_0"."row")."values" AS "continuation_2_0_values" FROM "entity_temporal_metadata" AS "entity_temporal_metadata_0_0_0" -CROSS JOIN LATERAL (SELECT (ROW((("entity_temporal_metadata_0_0_0"."entity_uuid" = $2)::boolean), NULL, NULL, NULL)::continuation) AS "row" +CROSS JOIN LATERAL (SELECT (ROW((("entity_temporal_metadata_0_0_0"."entity_uuid" = $3)::boolean), NULL, NULL, NULL)::continuation) AS "row" OFFSET 0) AS "continuation_2_0" -WHERE "entity_temporal_metadata_0_0_0"."transaction_time" && $0 AND "entity_temporal_metadata_0_0_0"."decision_time" && $1 AND ("continuation_2_0"."row")."filter" IS NOT FALSE +WHERE "entity_temporal_metadata_0_0_0"."transaction_time" && $1 AND "entity_temporal_metadata_0_0_0"."decision_time" && $2 AND 
("continuation_2_0"."row")."filter" IS NOT FALSE ════ Parameters ════════════════════════════════════════════════════════════════ -$0: TemporalAxis(Transaction) -$1: TemporalAxis(Decision) -$2: Input(user_id) +$1: TemporalAxis(Transaction) +$2: TemporalAxis(Decision) +$3: Input(user_id) diff --git a/libs/@local/hashql/eval/tests/ui/postgres/let-binding-propagation.stdout b/libs/@local/hashql/eval/tests/ui/postgres/let-binding-propagation.stdout index 01ab6aa78be..bd0a1866e48 100644 --- a/libs/@local/hashql/eval/tests/ui/postgres/let-binding-propagation.stdout +++ b/libs/@local/hashql/eval/tests/ui/postgres/let-binding-propagation.stdout @@ -2,12 +2,12 @@ SELECT ("continuation_2_0"."row")."block" AS "continuation_2_0_block", ("continuation_2_0"."row")."locals" AS "continuation_2_0_locals", ("continuation_2_0"."row")."values" AS "continuation_2_0_values" FROM "entity_temporal_metadata" AS "entity_temporal_metadata_0_0_0" -CROSS JOIN LATERAL (SELECT (ROW((("entity_temporal_metadata_0_0_0"."entity_uuid" = $2)::boolean), NULL, NULL, NULL)::continuation) AS "row" +CROSS JOIN LATERAL (SELECT (ROW((("entity_temporal_metadata_0_0_0"."entity_uuid" = $3)::boolean), NULL, NULL, NULL)::continuation) AS "row" OFFSET 0) AS "continuation_2_0" -WHERE "entity_temporal_metadata_0_0_0"."transaction_time" && $0 AND "entity_temporal_metadata_0_0_0"."decision_time" && $1 AND ("continuation_2_0"."row")."filter" IS NOT FALSE +WHERE "entity_temporal_metadata_0_0_0"."transaction_time" && $1 AND "entity_temporal_metadata_0_0_0"."decision_time" && $2 AND ("continuation_2_0"."row")."filter" IS NOT FALSE ════ Parameters ════════════════════════════════════════════════════════════════ -$0: TemporalAxis(Transaction) -$1: TemporalAxis(Decision) -$2: Input(user_id) +$1: TemporalAxis(Transaction) +$2: TemporalAxis(Decision) +$3: Input(user_id) diff --git a/libs/@local/hashql/eval/tests/ui/postgres/list-construction.stdout b/libs/@local/hashql/eval/tests/ui/postgres/list-construction.stdout index 
3da14761d00..6c3997b7dc0 100644 --- a/libs/@local/hashql/eval/tests/ui/postgres/list-construction.stdout +++ b/libs/@local/hashql/eval/tests/ui/postgres/list-construction.stdout @@ -2,13 +2,13 @@ SELECT ("continuation_3_0"."row")."block" AS "continuation_3_0_block", ("continuation_3_0"."row")."locals" AS "continuation_3_0_locals", ("continuation_3_0"."row")."values" AS "continuation_3_0_values" FROM "entity_temporal_metadata" AS "entity_temporal_metadata_0_0_0" -CROSS JOIN LATERAL (SELECT (ROW(((jsonb_build_array("entity_temporal_metadata_0_0_0"."entity_uuid", $2) = jsonb_build_array($3, "entity_temporal_metadata_0_0_0"."entity_uuid"))::boolean), NULL, NULL, NULL)::continuation) AS "row" +CROSS JOIN LATERAL (SELECT (ROW(((jsonb_build_array("entity_temporal_metadata_0_0_0"."entity_uuid", $3) = jsonb_build_array($4, "entity_temporal_metadata_0_0_0"."entity_uuid"))::boolean), NULL, NULL, NULL)::continuation) AS "row" OFFSET 0) AS "continuation_3_0" -WHERE "entity_temporal_metadata_0_0_0"."transaction_time" && $0 AND "entity_temporal_metadata_0_0_0"."decision_time" && $1 AND ("continuation_3_0"."row")."filter" IS NOT FALSE +WHERE "entity_temporal_metadata_0_0_0"."transaction_time" && $1 AND "entity_temporal_metadata_0_0_0"."decision_time" && $2 AND ("continuation_3_0"."row")."filter" IS NOT FALSE ════ Parameters ════════════════════════════════════════════════════════════════ -$0: TemporalAxis(Transaction) -$1: TemporalAxis(Decision) -$2: Input(u) -$3: Input(v) +$1: TemporalAxis(Transaction) +$2: TemporalAxis(Decision) +$3: Input(u) +$4: Input(v) diff --git a/libs/@local/hashql/eval/tests/ui/postgres/logical-and-inputs.stdout b/libs/@local/hashql/eval/tests/ui/postgres/logical-and-inputs.stdout index 861205c909e..44e9406d201 100644 --- a/libs/@local/hashql/eval/tests/ui/postgres/logical-and-inputs.stdout +++ b/libs/@local/hashql/eval/tests/ui/postgres/logical-and-inputs.stdout @@ -2,13 +2,13 @@ SELECT ("continuation_3_0"."row")."block" AS "continuation_3_0_block", 
("continuation_3_0"."row")."locals" AS "continuation_3_0_locals", ("continuation_3_0"."row")."values" AS "continuation_3_0_values" FROM "entity_temporal_metadata" AS "entity_temporal_metadata_0_0_0" -CROSS JOIN LATERAL (SELECT CASE WHEN (($2)::int) = 0 THEN (ROW(((0)::boolean), NULL, NULL, NULL)::continuation) WHEN (($2)::int) = 1 THEN (ROW((($3)::boolean), NULL, NULL, NULL)::continuation) END AS "row" +CROSS JOIN LATERAL (SELECT CASE WHEN (($3)::int) = 0 THEN (ROW(((0)::boolean), NULL, NULL, NULL)::continuation) WHEN (($3)::int) = 1 THEN (ROW((($4)::boolean), NULL, NULL, NULL)::continuation) END AS "row" OFFSET 0) AS "continuation_3_0" -WHERE "entity_temporal_metadata_0_0_0"."transaction_time" && $0 AND "entity_temporal_metadata_0_0_0"."decision_time" && $1 AND ("continuation_3_0"."row")."filter" IS NOT FALSE +WHERE "entity_temporal_metadata_0_0_0"."transaction_time" && $1 AND "entity_temporal_metadata_0_0_0"."decision_time" && $2 AND ("continuation_3_0"."row")."filter" IS NOT FALSE ════ Parameters ════════════════════════════════════════════════════════════════ -$0: TemporalAxis(Transaction) -$1: TemporalAxis(Decision) -$2: Input(a) -$3: Input(b) +$1: TemporalAxis(Transaction) +$2: TemporalAxis(Decision) +$3: Input(a) +$4: Input(b) diff --git a/libs/@local/hashql/eval/tests/ui/postgres/minimal-select-no-extra-joins.stdout b/libs/@local/hashql/eval/tests/ui/postgres/minimal-select-no-extra-joins.stdout index 3de5f5112f2..976e9859736 100644 --- a/libs/@local/hashql/eval/tests/ui/postgres/minimal-select-no-extra-joins.stdout +++ b/libs/@local/hashql/eval/tests/ui/postgres/minimal-select-no-extra-joins.stdout @@ -2,12 +2,12 @@ SELECT ("continuation_2_0"."row")."block" AS "continuation_2_0_block", ("continuation_2_0"."row")."locals" AS "continuation_2_0_locals", ("continuation_2_0"."row")."values" AS "continuation_2_0_values" FROM "entity_temporal_metadata" AS "entity_temporal_metadata_0_0_0" -CROSS JOIN LATERAL (SELECT (ROW((("entity_temporal_metadata_0_0_0"."web_id" 
= $2)::boolean), NULL, NULL, NULL)::continuation) AS "row" +CROSS JOIN LATERAL (SELECT (ROW((("entity_temporal_metadata_0_0_0"."web_id" = $3)::boolean), NULL, NULL, NULL)::continuation) AS "row" OFFSET 0) AS "continuation_2_0" -WHERE "entity_temporal_metadata_0_0_0"."transaction_time" && $0 AND "entity_temporal_metadata_0_0_0"."decision_time" && $1 AND ("continuation_2_0"."row")."filter" IS NOT FALSE +WHERE "entity_temporal_metadata_0_0_0"."transaction_time" && $1 AND "entity_temporal_metadata_0_0_0"."decision_time" && $2 AND ("continuation_2_0"."row")."filter" IS NOT FALSE ════ Parameters ════════════════════════════════════════════════════════════════ -$0: TemporalAxis(Transaction) -$1: TemporalAxis(Decision) -$2: Input(expected_web) +$1: TemporalAxis(Transaction) +$2: TemporalAxis(Decision) +$3: Input(expected_web) diff --git a/libs/@local/hashql/eval/tests/ui/postgres/mixed-sources-filter.stdout b/libs/@local/hashql/eval/tests/ui/postgres/mixed-sources-filter.stdout index 2458d6a1604..75feded7550 100644 --- a/libs/@local/hashql/eval/tests/ui/postgres/mixed-sources-filter.stdout +++ b/libs/@local/hashql/eval/tests/ui/postgres/mixed-sources-filter.stdout @@ -6,12 +6,12 @@ INNER JOIN "entity_editions" AS "entity_editions_0_0_1" ON "entity_editions_0_0_1"."entity_edition_id" = "entity_temporal_metadata_0_0_0"."entity_edition_id" CROSS JOIN LATERAL (SELECT (ROW(((NOT("entity_editions_0_0_1"."archived"))::boolean), NULL, NULL, NULL)::continuation) AS "row" OFFSET 0) AS "continuation_0_0" -CROSS JOIN LATERAL (SELECT (ROW((("entity_temporal_metadata_0_0_0"."entity_uuid" = $2)::boolean), NULL, NULL, NULL)::continuation) AS "row" +CROSS JOIN LATERAL (SELECT (ROW((("entity_temporal_metadata_0_0_0"."entity_uuid" = $3)::boolean), NULL, NULL, NULL)::continuation) AS "row" OFFSET 0) AS "continuation_1_0" -WHERE "entity_temporal_metadata_0_0_0"."transaction_time" && $0 AND "entity_temporal_metadata_0_0_0"."decision_time" && $1 AND ("continuation_0_0"."row")."filter" IS NOT 
FALSE AND ("continuation_1_0"."row")."filter" IS NOT FALSE +WHERE "entity_temporal_metadata_0_0_0"."transaction_time" && $1 AND "entity_temporal_metadata_0_0_0"."decision_time" && $2 AND ("continuation_0_0"."row")."filter" IS NOT FALSE AND ("continuation_1_0"."row")."filter" IS NOT FALSE ════ Parameters ════════════════════════════════════════════════════════════════ -$0: TemporalAxis(Transaction) -$1: TemporalAxis(Decision) -$2: Env(#0) +$1: TemporalAxis(Transaction) +$2: TemporalAxis(Decision) +$3: Env(1, #0) diff --git a/libs/@local/hashql/eval/tests/ui/postgres/multiple-filters.stdout b/libs/@local/hashql/eval/tests/ui/postgres/multiple-filters.stdout index 6995c838107..528d2e7d33b 100644 --- a/libs/@local/hashql/eval/tests/ui/postgres/multiple-filters.stdout +++ b/libs/@local/hashql/eval/tests/ui/postgres/multiple-filters.stdout @@ -2,15 +2,15 @@ SELECT ("continuation_3_0"."row")."block" AS "continuation_3_0_block", ("continuation_3_0"."row")."locals" AS "continuation_3_0_locals", ("continuation_3_0"."row")."values" AS "continuation_3_0_values", ("continuation_4_0"."row")."block" AS "continuation_4_0_block", ("continuation_4_0"."row")."locals" AS "continuation_4_0_locals", ("continuation_4_0"."row")."values" AS "continuation_4_0_values" FROM "entity_temporal_metadata" AS "entity_temporal_metadata_0_0_0" -CROSS JOIN LATERAL (SELECT (ROW((("entity_temporal_metadata_0_0_0"."entity_uuid" = $2)::boolean), NULL, NULL, NULL)::continuation) AS "row" +CROSS JOIN LATERAL (SELECT (ROW((("entity_temporal_metadata_0_0_0"."entity_uuid" = $3)::boolean), NULL, NULL, NULL)::continuation) AS "row" OFFSET 0) AS "continuation_3_0" -CROSS JOIN LATERAL (SELECT (ROW((("entity_temporal_metadata_0_0_0"."web_id" = $3)::boolean), NULL, NULL, NULL)::continuation) AS "row" +CROSS JOIN LATERAL (SELECT (ROW((("entity_temporal_metadata_0_0_0"."web_id" = $4)::boolean), NULL, NULL, NULL)::continuation) AS "row" OFFSET 0) AS "continuation_4_0" -WHERE 
"entity_temporal_metadata_0_0_0"."transaction_time" && $0 AND "entity_temporal_metadata_0_0_0"."decision_time" && $1 AND ("continuation_3_0"."row")."filter" IS NOT FALSE AND ("continuation_4_0"."row")."filter" IS NOT FALSE +WHERE "entity_temporal_metadata_0_0_0"."transaction_time" && $1 AND "entity_temporal_metadata_0_0_0"."decision_time" && $2 AND ("continuation_3_0"."row")."filter" IS NOT FALSE AND ("continuation_4_0"."row")."filter" IS NOT FALSE ════ Parameters ════════════════════════════════════════════════════════════════ -$0: TemporalAxis(Transaction) -$1: TemporalAxis(Decision) -$2: Input(uuid) -$3: Input(web) +$1: TemporalAxis(Transaction) +$2: TemporalAxis(Decision) +$3: Input(uuid) +$4: Input(web) diff --git a/libs/@local/hashql/eval/tests/ui/postgres/nested-if-input-branches.stdout b/libs/@local/hashql/eval/tests/ui/postgres/nested-if-input-branches.stdout index 06a006137ec..a3e812791fe 100644 --- a/libs/@local/hashql/eval/tests/ui/postgres/nested-if-input-branches.stdout +++ b/libs/@local/hashql/eval/tests/ui/postgres/nested-if-input-branches.stdout @@ -2,16 +2,16 @@ SELECT ("continuation_2_0"."row")."block" AS "continuation_2_0_block", ("continuation_2_0"."row")."locals" AS "continuation_2_0_locals", ("continuation_2_0"."row")."values" AS "continuation_2_0_values" FROM "entity_temporal_metadata" AS "entity_temporal_metadata_0_0_0" -CROSS JOIN LATERAL (SELECT CASE WHEN (($2)::int) = 0 THEN (ROW((("entity_temporal_metadata_0_0_0"."entity_uuid" = $3)::boolean), NULL, NULL, NULL)::continuation) WHEN (($2)::int) = 1 THEN CASE WHEN (($4)::int) = 0 THEN (ROW((("entity_temporal_metadata_0_0_0"."entity_uuid" = $5)::boolean), NULL, NULL, NULL)::continuation) WHEN (($4)::int) = 1 THEN (ROW((("entity_temporal_metadata_0_0_0"."entity_uuid" = $6)::boolean), NULL, NULL, NULL)::continuation) END END AS "row" +CROSS JOIN LATERAL (SELECT CASE WHEN (($3)::int) = 0 THEN (ROW((("entity_temporal_metadata_0_0_0"."entity_uuid" = $4)::boolean), NULL, NULL, 
NULL)::continuation) WHEN (($3)::int) = 1 THEN CASE WHEN (($5)::int) = 0 THEN (ROW((("entity_temporal_metadata_0_0_0"."entity_uuid" = $6)::boolean), NULL, NULL, NULL)::continuation) WHEN (($5)::int) = 1 THEN (ROW((("entity_temporal_metadata_0_0_0"."entity_uuid" = $7)::boolean), NULL, NULL, NULL)::continuation) END END AS "row" OFFSET 0) AS "continuation_2_0" -WHERE "entity_temporal_metadata_0_0_0"."transaction_time" && $0 AND "entity_temporal_metadata_0_0_0"."decision_time" && $1 AND ("continuation_2_0"."row")."filter" IS NOT FALSE +WHERE "entity_temporal_metadata_0_0_0"."transaction_time" && $1 AND "entity_temporal_metadata_0_0_0"."decision_time" && $2 AND ("continuation_2_0"."row")."filter" IS NOT FALSE ════ Parameters ════════════════════════════════════════════════════════════════ -$0: TemporalAxis(Transaction) -$1: TemporalAxis(Decision) -$2: Input(foo) -$3: Input(id_c) -$4: Input(bar) -$5: Input(id_b) -$6: Input(id_a) +$1: TemporalAxis(Transaction) +$2: TemporalAxis(Decision) +$3: Input(foo) +$4: Input(id_c) +$5: Input(bar) +$6: Input(id_b) +$7: Input(id_a) diff --git a/libs/@local/hashql/eval/tests/ui/postgres/opaque-passthrough.stdout b/libs/@local/hashql/eval/tests/ui/postgres/opaque-passthrough.stdout index 891cf808e09..b16f6bacefd 100644 --- a/libs/@local/hashql/eval/tests/ui/postgres/opaque-passthrough.stdout +++ b/libs/@local/hashql/eval/tests/ui/postgres/opaque-passthrough.stdout @@ -2,12 +2,12 @@ SELECT ("continuation_8_0"."row")."block" AS "continuation_8_0_block", ("continuation_8_0"."row")."locals" AS "continuation_8_0_locals", ("continuation_8_0"."row")."values" AS "continuation_8_0_values" FROM "entity_temporal_metadata" AS "entity_temporal_metadata_0_0_0" -CROSS JOIN LATERAL (SELECT (ROW((("entity_temporal_metadata_0_0_0"."entity_uuid" = $2)::boolean), NULL, NULL, NULL)::continuation) AS "row" +CROSS JOIN LATERAL (SELECT (ROW((("entity_temporal_metadata_0_0_0"."entity_uuid" = $3)::boolean), NULL, NULL, NULL)::continuation) AS "row" OFFSET 0) AS 
"continuation_8_0" -WHERE "entity_temporal_metadata_0_0_0"."transaction_time" && $0 AND "entity_temporal_metadata_0_0_0"."decision_time" && $1 AND ("continuation_8_0"."row")."filter" IS NOT FALSE +WHERE "entity_temporal_metadata_0_0_0"."transaction_time" && $1 AND "entity_temporal_metadata_0_0_0"."decision_time" && $2 AND ("continuation_8_0"."row")."filter" IS NOT FALSE ════ Parameters ════════════════════════════════════════════════════════════════ -$0: TemporalAxis(Transaction) -$1: TemporalAxis(Decision) -$2: Input(id) +$1: TemporalAxis(Transaction) +$2: TemporalAxis(Decision) +$3: Input(id) diff --git a/libs/@local/hashql/eval/tests/ui/postgres/struct-construction.stdout b/libs/@local/hashql/eval/tests/ui/postgres/struct-construction.stdout index 5ac47c9b6af..f630b4d404e 100644 --- a/libs/@local/hashql/eval/tests/ui/postgres/struct-construction.stdout +++ b/libs/@local/hashql/eval/tests/ui/postgres/struct-construction.stdout @@ -2,15 +2,15 @@ SELECT ("continuation_4_0"."row")."block" AS "continuation_4_0_block", ("continuation_4_0"."row")."locals" AS "continuation_4_0_locals", ("continuation_4_0"."row")."values" AS "continuation_4_0_values" FROM "entity_temporal_metadata" AS "entity_temporal_metadata_0_0_0" -CROSS JOIN LATERAL (SELECT (ROW(((jsonb_build_object($2, "entity_temporal_metadata_0_0_0"."entity_uuid", $3, "entity_temporal_metadata_0_0_0"."web_id") = jsonb_build_object($2, $4, $3, $5))::boolean), NULL, NULL, NULL)::continuation) AS "row" +CROSS JOIN LATERAL (SELECT (ROW(((jsonb_build_object($3, "entity_temporal_metadata_0_0_0"."entity_uuid", $4, "entity_temporal_metadata_0_0_0"."web_id") = jsonb_build_object($3, $5, $4, $6))::boolean), NULL, NULL, NULL)::continuation) AS "row" OFFSET 0) AS "continuation_4_0" -WHERE "entity_temporal_metadata_0_0_0"."transaction_time" && $0 AND "entity_temporal_metadata_0_0_0"."decision_time" && $1 AND ("continuation_4_0"."row")."filter" IS NOT FALSE +WHERE "entity_temporal_metadata_0_0_0"."transaction_time" && $1 AND 
"entity_temporal_metadata_0_0_0"."decision_time" && $2 AND ("continuation_4_0"."row")."filter" IS NOT FALSE ════ Parameters ════════════════════════════════════════════════════════════════ -$0: TemporalAxis(Transaction) -$1: TemporalAxis(Decision) -$2: Symbol(uuid) -$3: Symbol(web) -$4: Input(u) -$5: Input(w) +$1: TemporalAxis(Transaction) +$2: TemporalAxis(Decision) +$3: Symbol(uuid) +$4: Symbol(web) +$5: Input(u) +$6: Input(w) diff --git a/libs/@local/hashql/eval/tests/ui/postgres/tuple-construction.stdout b/libs/@local/hashql/eval/tests/ui/postgres/tuple-construction.stdout index b0f5d2884db..9846897911e 100644 --- a/libs/@local/hashql/eval/tests/ui/postgres/tuple-construction.stdout +++ b/libs/@local/hashql/eval/tests/ui/postgres/tuple-construction.stdout @@ -2,13 +2,13 @@ SELECT ("continuation_4_0"."row")."block" AS "continuation_4_0_block", ("continuation_4_0"."row")."locals" AS "continuation_4_0_locals", ("continuation_4_0"."row")."values" AS "continuation_4_0_values" FROM "entity_temporal_metadata" AS "entity_temporal_metadata_0_0_0" -CROSS JOIN LATERAL (SELECT (ROW(((jsonb_build_array("entity_temporal_metadata_0_0_0"."entity_uuid", "entity_temporal_metadata_0_0_0"."web_id") = jsonb_build_array($2, $3))::boolean), NULL, NULL, NULL)::continuation) AS "row" +CROSS JOIN LATERAL (SELECT (ROW(((jsonb_build_array("entity_temporal_metadata_0_0_0"."entity_uuid", "entity_temporal_metadata_0_0_0"."web_id") = jsonb_build_array($3, $4))::boolean), NULL, NULL, NULL)::continuation) AS "row" OFFSET 0) AS "continuation_4_0" -WHERE "entity_temporal_metadata_0_0_0"."transaction_time" && $0 AND "entity_temporal_metadata_0_0_0"."decision_time" && $1 AND ("continuation_4_0"."row")."filter" IS NOT FALSE +WHERE "entity_temporal_metadata_0_0_0"."transaction_time" && $1 AND "entity_temporal_metadata_0_0_0"."decision_time" && $2 AND ("continuation_4_0"."row")."filter" IS NOT FALSE ════ Parameters ════════════════════════════════════════════════════════════════ -$0: 
TemporalAxis(Transaction) -$1: TemporalAxis(Decision) -$2: Input(u) -$3: Input(w) +$1: TemporalAxis(Transaction) +$2: TemporalAxis(Decision) +$3: Input(u) +$4: Input(w)