diff --git a/src/lib.rs b/src/lib.rs index 136aec5..b64f3d1 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -19,7 +19,7 @@ use crate::query::ast::{ QueryOperation, SqlValue, }; use crate::query::compiler; -use crate::query::lookup; +use crate::query::lookups; use crate::transaction::TransactionHandle; // ### @@ -59,12 +59,12 @@ fn setup<'py>( #[pyfunction] fn register_lookup(name: String, sql_template: String) -> PyResult<()> { - lookup::register_custom(name, sql_template).map_err(PyErr::from) + lookups::register_custom(name, sql_template).map_err(PyErr::from) } #[pyfunction] fn available_lookups() -> PyResult> { - lookup::registered_lookups().map_err(PyErr::from) + lookups::registered_lookups().map_err(PyErr::from) } #[pyfunction] @@ -811,7 +811,7 @@ fn bulk_update<'py>( #[pymodule] fn ryx_core(m: &Bound<'_, PyModule>) -> PyResult<()> { - lookup::init_registry(); + lookups::init_registry(); let mut builder = tokio::runtime::Builder::new_multi_thread(); builder.worker_threads(4).enable_all(); diff --git a/src/query/compiler.rs b/src/query/compiler/compiler.rs similarity index 65% rename from src/query/compiler.rs rename to src/query/compiler/compiler.rs index f4baac5..06e3dad 100644 --- a/src/query/compiler.rs +++ b/src/query/compiler/compiler.rs @@ -1,14 +1,10 @@ // // ### -// Ryx — SQL Compiler +// Ryx — SQL Compiler Implementation +// ### // -// Supports: -// compile_q() : recursive Q-tree → SQL fragment -// compile_joins() : JoinClause list → SQL JOIN clauses -// compile_aggs() : AggregateExpr list → SELECT aggregate columns -// compile_group_by(): GROUP BY clause -// compile_having() : HAVING clause (same engine as WHERE) -// compile_select() : now merges plain columns + aggregate annotations +// This file contains the SQL compiler that transforms QueryNode AST into SQL strings. +// See compiler/mod.rs for the module structure. 
// ### use crate::errors::{RyxError, RyxResult}; @@ -17,20 +13,20 @@ use crate::query::ast::{ AggFunc, AggregateExpr, FilterNode, JoinClause, JoinKind, QNode, QueryNode, QueryOperation, SortDirection, SqlValue, }; -use crate::query::lookup::{self, LookupContext}; +use crate::query::lookups::date_lookups as date; +use crate::query::lookups::json_lookups as json; +use crate::query::lookups::{self, LookupContext}; + +pub use super::helpers::{apply_like_wrapping, qualified_col, split_qualified, KNOWN_TRANSFORMS}; + +use super::helpers; -// ### -// Output type -// ### #[derive(Debug, Clone)] pub struct CompiledQuery { pub sql: String, pub values: Vec, } -// ### -// Public entry point -// ### pub fn compile(node: &QueryNode) -> RyxResult { let mut values: Vec = Vec::new(); let sql = match &node.operation { @@ -49,22 +45,16 @@ pub fn compile(node: &QueryNode) -> RyxResult { Ok(CompiledQuery { sql, values }) } -// ### -// SELECT -// ### - fn compile_select( node: &QueryNode, columns: Option<&[String]>, values: &mut Vec, ) -> RyxResult { - // # SELECT list - // Columns from plain columns arg + annotation aliases merged together. let base_cols = match columns { None => "*".to_string(), Some(cols) => cols .iter() - .map(|c| qualified_col(c)) + .map(|c| helpers::qualified_col(c)) .collect::>() .join(", "), }; @@ -74,15 +64,13 @@ fn compile_select( let select_list = match (base_cols.as_str(), agg_cols.as_str()) { (_, "") => base_cols, ("*", _) => { - // When we have annotations we drop the bare * and only emit the - // GROUP BY columns + aggregates (standard SQL). 
if node.group_by.is_empty() { agg_cols } else { let gb = node .group_by .iter() - .map(|c| quote_col(c)) + .map(|c| helpers::quote_col(c)) .collect::>() .join(", "); format!("{gb}, {agg_cols}") @@ -94,16 +82,14 @@ fn compile_select( let distinct = if node.distinct { "DISTINCT " } else { "" }; let mut sql = format!( "SELECT {distinct}{select_list} FROM {tbl}", - tbl = quote_col(&node.table), + tbl = helpers::quote_col(&node.table), ); - // # JOINs if !node.joins.is_empty() { sql.push(' '); sql.push_str(&compile_joins(&node.joins)); } - // # WHERE let where_sql = compile_where_combined(&node.filters, node.q_filter.as_ref(), values, node.backend)?; if !where_sql.is_empty() { @@ -111,26 +97,23 @@ fn compile_select( sql.push_str(&where_sql); } - // # GROUP BY if !node.group_by.is_empty() { let gb = node .group_by .iter() - .map(|c| quote_col(c)) + .map(|c| helpers::quote_col(c)) .collect::>() .join(", "); sql.push_str(" GROUP BY "); sql.push_str(&gb); } - // # HAVING if !node.having.is_empty() { let having = compile_filters(&node.having, values, node.backend)?; sql.push_str(" HAVING "); sql.push_str(&having); } - // # ORDER BY if !node.order_by.is_empty() { sql.push_str(" ORDER BY "); sql.push_str(&compile_order_by(&node.order_by)); @@ -146,12 +129,6 @@ fn compile_select( Ok(sql) } -// ### -// AGGREGATE (no rows returned — only aggregate scalars) -// -// Used by `.aggregate(total=Sum("views"))`. -// Returns a single row dict like {"total": 1234, "avg_views": 42.5}. 
-// ### fn compile_aggregate(node: &QueryNode, values: &mut Vec) -> RyxResult { if node.annotations.is_empty() { return Err(RyxError::Internal( @@ -159,7 +136,7 @@ fn compile_aggregate(node: &QueryNode, values: &mut Vec) -> RyxResult< )); } let agg_cols = compile_agg_cols(&node.annotations); - let mut sql = format!("SELECT {agg_cols} FROM {}", quote_col(&node.table)); + let mut sql = format!("SELECT {agg_cols} FROM {}", helpers::quote_col(&node.table)); if !node.joins.is_empty() { sql.push(' '); @@ -176,16 +153,8 @@ fn compile_aggregate(node: &QueryNode, values: &mut Vec) -> RyxResult< Ok(sql) } -// ### -// COUNT -// ### - -// ### -// COUNT -// ### - fn compile_count(node: &QueryNode, values: &mut Vec) -> RyxResult { - let mut sql = format!("SELECT COUNT(*) FROM {}", quote_col(&node.table)); + let mut sql = format!("SELECT COUNT(*) FROM {}", helpers::quote_col(&node.table)); if !node.joins.is_empty() { sql.push(' '); sql.push_str(&compile_joins(&node.joins)); @@ -199,12 +168,8 @@ fn compile_count(node: &QueryNode, values: &mut Vec) -> RyxResult) -> RyxResult { - let mut sql = format!("DELETE FROM {}", quote_col(&node.table)); + let mut sql = format!("DELETE FROM {}", helpers::quote_col(&node.table)); let where_sql = compile_where_combined(&node.filters, node.q_filter.as_ref(), values, node.backend)?; if !where_sql.is_empty() { @@ -214,10 +179,6 @@ fn compile_delete(node: &QueryNode, values: &mut Vec) -> RyxResult>() .join(", "); let ph = std::iter::repeat_n("?", cols.len()) @@ -268,7 +229,7 @@ fn compile_insert( .join(", "); let mut sql = format!( "INSERT INTO {} ({}) VALUES ({})", - quote_col(&node.table), + helpers::quote_col(&node.table), cols_sql, ph ); @@ -278,11 +239,7 @@ fn compile_insert( Ok(sql) } -// ### -// JOIN compilation -// ### - -fn compile_joins(joins: &[JoinClause]) -> String { +pub fn compile_joins(joins: &[JoinClause]) -> String { joins .iter() .map(|j| { @@ -296,26 +253,34 @@ fn compile_joins(joins: &[JoinClause]) -> String { let alias_sql = j 
.alias .as_deref() - .map(|a| format!(" AS {}", quote_col(a))) + .map(|a| format!(" AS {}", helpers::quote_col(a))) .unwrap_or_default(); - let (l_table, l_col) = split_qualified(&j.on_left); - let (r_table, r_col) = split_qualified(&j.on_right); + let (l_table, l_col): (String, String) = helpers::split_qualified(&j.on_left); + let (r_table, r_col): (String, String) = helpers::split_qualified(&j.on_right); let on_l = if l_table.is_empty() { - quote_col(&l_col) + helpers::quote_col(&l_col) } else { - format!("{}.{}", quote_col(&l_table), quote_col(&l_col)) + format!( + "{}.{}", + helpers::quote_col(&l_table), + helpers::quote_col(&l_col) + ) }; let on_r = if r_table.is_empty() { - quote_col(&r_col) + helpers::quote_col(&r_col) } else { - format!("{}.{}", quote_col(&r_table), quote_col(&r_col)) + format!( + "{}.{}", + helpers::quote_col(&r_table), + helpers::quote_col(&r_col) + ) }; if j.kind == JoinKind::CrossJoin { - format!("{kind} {}{alias_sql}", quote_col(&j.table)) + format!("{kind} {}{alias_sql}", helpers::quote_col(&j.table)) } else { format!( "{kind} {}{alias_sql} ON {on_l} = {on_r}", - quote_col(&j.table) + helpers::quote_col(&j.table) ) } }) @@ -323,17 +288,13 @@ fn compile_joins(joins: &[JoinClause]) -> String { .join(" ") } -// ### -// Aggregate column list → SUM("views") AS "total_views", ... 
-// ### - -fn compile_agg_cols(anns: &[AggregateExpr]) -> String { +pub fn compile_agg_cols(anns: &[AggregateExpr]) -> String { anns.iter() .map(|a| { let col = if a.field == "*" { "*".to_string() } else { - qualified_col(&a.field) + helpers::qualified_col(&a.field) }; let distinct = if a.distinct && a.func != AggFunc::Count { "DISTINCT " @@ -343,13 +304,13 @@ fn compile_agg_cols(anns: &[AggregateExpr]) -> String { "" }; match &a.func { - AggFunc::Raw(expr) => format!("{expr} AS {}", quote_col(&a.alias)), + AggFunc::Raw(expr) => format!("{expr} AS {}", helpers::quote_col(&a.alias)), f => format!( "{}({}{}) AS {}", f.sql_name(), distinct, col, - quote_col(&a.alias) + helpers::quote_col(&a.alias) ), } }) @@ -357,9 +318,19 @@ fn compile_agg_cols(anns: &[AggregateExpr]) -> String { .join(", ") } -// ### -// WHERE = flat filters AND Q-tree (merged) -// ### +pub fn compile_order_by(clauses: &[crate::query::ast::OrderByClause]) -> String { + clauses + .iter() + .map(|c| { + let dir = match c.direction { + SortDirection::Asc => "ASC", + SortDirection::Desc => "DESC", + }; + format!("{} {dir}", helpers::qualified_col(&c.field)) + }) + .collect::>() + .join(", ") +} fn compile_where_combined( filters: &[FilterNode], @@ -385,15 +356,7 @@ fn compile_where_combined( }) } -// ### -// Q-tree compiler (recursive) -// ### - -/// Recursively compile a QNode tree into a SQL fragment. -/// -/// Design: we emit minimal parentheses — each non-leaf node wraps its children -/// in parens only when necessary (AND inside OR must be parenthesised). 
-fn compile_q(q: &QNode, values: &mut Vec, backend: Backend) -> RyxResult { +pub fn compile_q(q: &QNode, values: &mut Vec, backend: Backend) -> RyxResult { match q { QNode::Leaf { field, @@ -422,10 +385,6 @@ fn compile_q(q: &QNode, values: &mut Vec, backend: Backend) -> RyxResu } } -// ### -// Flat filter list compiler -// ### - fn compile_filters( filters: &[FilterNode], values: &mut Vec, @@ -438,10 +397,6 @@ fn compile_filters( Ok(parts.join(" AND ")) } -// ### -// Single filter → SQL fragment (shared by flat list and Q-tree) -// ### - fn compile_single_filter( field: &str, lookup: &str, @@ -450,38 +405,24 @@ fn compile_single_filter( values: &mut Vec, backend: Backend, ) -> RyxResult { - // Support "table.column" qualified references in filters - // Also handle field__transform patterns (e.g., "created_at__year") - // For JSON key lookups like "bio__key__priority", we need to handle specially - let known_transforms = [ - "date", "year", "month", "day", "hour", "minute", "second", "week", "dow", "quarter", - "time", "iso_week", "iso_dow", "key", "key_text", "json", - ]; - let (base_column, applied_transforms, json_key) = if field.contains("__") { let parts: Vec<&str> = field.split("__").collect(); - // Find the first part that's NOT a known transform - that's the JSON key - // For example: "bio__key__priority" -> transforms=["key"], key="priority", base="bio" let mut transforms = Vec::new(); let mut key_part: Option<&str> = None; for part in parts[1..].iter() { - if known_transforms.contains(part) { + if KNOWN_TRANSFORMS.contains(part) { transforms.push(*part); } else { - // First non-transform part is the JSON key key_part = Some(*part); break; } } if let Some(key) = key_part { - // Base column is just the first part (the field name) - // Transforms is everything that came before the key (parts[0].to_string(), transforms, Some(key.to_string())) } else if !transforms.is_empty() { - // All parts are transforms (parts[0].to_string(), transforms, None) } else { 
(field.to_string(), vec![], None) @@ -490,28 +431,18 @@ fn compile_single_filter( (field.to_string(), vec![], None) }; - // For JSON key transforms, we need to pass the key to resolve() - // The key is embedded in the field name (bio__key__priority -> key=priority) - - // If the lookup contains "__" (is a chained lookup like "month__gte"), - // DON'T apply transforms here - let resolve() handle it completely - // This avoids double-transform issues where the compiler applies transform - // and then resolve() also tries to handle it let final_column = if lookup.contains("__") { - // For chained lookups, use just the base column - resolve() will handle transforms - qualified_col(&base_column) + helpers::qualified_col(&base_column) } else if !applied_transforms.is_empty() { - // For simple transform-only lookups (like "year"), apply transforms here - let mut result = qualified_col(&base_column); + let mut result = helpers::qualified_col(&base_column); for transform in &applied_transforms { - result = lookup::apply_transform(transform, &result, backend, None)?; + result = lookups::apply_transform(transform, &result, backend, None)?; } result } else { - qualified_col(&base_column) + helpers::qualified_col(&base_column) }; - // For JSON key transforms, pass the key in the context let ctx = LookupContext { column: final_column.clone(), negated, @@ -519,7 +450,6 @@ fn compile_single_filter( json_key: json_key.clone(), }; - // # isnull (no bind param) if lookup == "isnull" { let is_null = match value { SqlValue::Bool(b) => *b, @@ -538,7 +468,6 @@ fn compile_single_filter( }); } - // # in (expand N placeholders) if lookup == "in" { let items = match value { SqlValue::List(v) => v.clone(), @@ -547,6 +476,7 @@ fn compile_single_filter( if items.is_empty() { return Ok("(1 = 0)".into()); } + let ph = std::iter::repeat_n("?", items.len()) .collect::>() .join(", "); @@ -559,7 +489,6 @@ fn compile_single_filter( }); } - // # range (two bind params) if lookup == "range" { let (lo, 
hi) = match value { SqlValue::List(v) if v.len() == 2 => (v[0].clone(), v[1].clone()), @@ -575,19 +504,8 @@ fn compile_single_filter( }); } - // # general lookup - // If lookup is a transform (like "year", "month"), use the transform function which includes = ? - // BUT if lookup contains "__" (like "date__gte"), we need to use resolve() to handle the chain - // ALSO use resolve() for JSON key transforms even if lookup is simple (like "exact") - let known_transforms = [ - "date", "year", "month", "day", "hour", "minute", "second", "week", "dow", "quarter", - "time", "iso_week", "iso_dow", "key", "key_text", "json", - ]; - - // If lookup contains "__", it's a chained lookup (e.g., "date__gte") - use resolve() - // OR if we have a JSON key (json_key is Some), we need resolve() to apply it if lookup.contains("__") || json_key.is_some() { - let fragment = lookup::resolve(&base_column, lookup, &ctx)?; + let fragment = lookups::resolve(&base_column, lookup, &ctx)?; values.push(value.clone()); return Ok(if negated { format!("NOT ({fragment})") @@ -596,24 +514,24 @@ fn compile_single_filter( }); } - if known_transforms.contains(&lookup) { + if KNOWN_TRANSFORMS.contains(&lookup) { let transform_fn = match lookup { - "date" => lookup::date_transform, - "year" => lookup::year_transform, - "month" => lookup::month_transform, - "day" => lookup::day_transform, - "hour" => lookup::hour_transform, - "minute" => lookup::minute_transform, - "second" => lookup::second_transform, - "week" => lookup::week_transform, - "dow" => lookup::dow_transform, - "quarter" => lookup::quarter_transform, - "time" => lookup::time_transform, - "iso_week" => lookup::iso_week_transform, - "iso_dow" => lookup::iso_dow_transform, - "key" => lookup::json_key_transform, - "key_text" => lookup::json_key_text_transform, - "json" => lookup::json_cast_transform, + "date" => date::date_transform as crate::query::lookups::LookupFn, + "year" => date::year_transform as crate::query::lookups::LookupFn, + "month" => 
date::month_transform as crate::query::lookups::LookupFn, + "day" => date::day_transform as crate::query::lookups::LookupFn, + "hour" => date::hour_transform as crate::query::lookups::LookupFn, + "minute" => date::minute_transform as crate::query::lookups::LookupFn, + "second" => date::second_transform as crate::query::lookups::LookupFn, + "week" => date::week_transform as crate::query::lookups::LookupFn, + "dow" => date::dow_transform as crate::query::lookups::LookupFn, + "quarter" => date::quarter_transform as crate::query::lookups::LookupFn, + "time" => date::time_transform as crate::query::lookups::LookupFn, + "iso_week" => date::iso_week_transform as crate::query::lookups::LookupFn, + "iso_dow" => date::iso_dow_transform as crate::query::lookups::LookupFn, + "key" => json::json_key_transform as crate::query::lookups::LookupFn, + "key_text" => json::json_key_text_transform as crate::query::lookups::LookupFn, + "json" => json::json_cast_transform as crate::query::lookups::LookupFn, _ => { return Err(RyxError::UnknownLookup { field: field.to_string(), @@ -621,12 +539,11 @@ fn compile_single_filter( }) } }; - // For transforms, we need to push the value to the values vector values.push(value.clone()); return Ok(transform_fn(&ctx)); } - let fragment = lookup::resolve(&base_column, lookup, &ctx)?; + let fragment = lookups::resolve(&base_column, lookup, &ctx)?; let bound = apply_like_wrapping(lookup, value.clone()); values.push(bound); Ok(if negated { @@ -636,95 +553,21 @@ fn compile_single_filter( }) } -// ### -// ORDER BY -// ### -fn compile_order_by(clauses: &[crate::query::ast::OrderByClause]) -> String { - clauses - .iter() - .map(|c| { - let dir = match c.direction { - SortDirection::Asc => "ASC", - SortDirection::Desc => "DESC", - }; - format!("{} {dir}", qualified_col(&c.field)) - }) - .collect::>() - .join(", ") -} - -// ### -// Identifier helpers -// ### - -/// Double-quote a simple identifier (column or table name). 
-pub fn quote_col(s: &str) -> String { - format!("\"{}\"", s.replace('"', "\"\"")) -} - -/// Handle `table.column` → `"table"."column"`, or plain column → `"column"`. -/// Also handles annotation aliases (already an expression — left as-is). -fn qualified_col(s: &str) -> String { - if s.contains('.') { - let (table, col) = s.split_once('.').unwrap(); - format!("{}.{}", quote_col(table), quote_col(col)) - } else { - quote_col(s) - } -} - -/// Split `"table.column"` into `("table", "column")`. -/// Returns `("", s)` if there is no dot. -fn split_qualified(s: &str) -> (String, String) { - if let Some((t, c)) = s.split_once('.') { - (t.to_string(), c.to_string()) - } else { - (String::new(), s.to_string()) - } -} - -/// Apply LIKE `%` wrapping to the value based on the lookup type. -fn apply_like_wrapping(lookup: &str, value: SqlValue) -> SqlValue { - match lookup { - "contains" | "icontains" => wrap_text(value, |s| format!("%{s}%")), - "startswith" | "istartswith" => wrap_text(value, |s| format!("{s}%")), - "endswith" | "iendswith" => wrap_text(value, |s| format!("%{s}")), - _ => value, - } -} - -fn wrap_text(value: SqlValue, f: impl Fn(String) -> String) -> SqlValue { - if let SqlValue::Text(s) = value { - SqlValue::Text(f(s)) - } else { - value - } -} - -// ### -// Unit tests -// ### - #[cfg(test)] mod tests { use super::*; use crate::query::ast::*; - use crate::query::lookup; - - fn init() { - lookup::init_registry(); - } #[test] fn test_bare_select() { - init(); + init_registry(); let q = compile(&QueryNode::select("posts")).unwrap(); assert_eq!(q.sql, r#"SELECT * FROM "posts""#); } #[test] fn test_q_or() { - init(); + init_registry(); let mut node = QueryNode::select("posts"); node = node.with_q(QNode::Or(vec![ QNode::Leaf { @@ -746,7 +589,7 @@ mod tests { #[test] fn test_inner_join() { - init(); + init_registry(); let node = QueryNode::select("posts").with_join(JoinClause { kind: JoinKind::Inner, table: "authors".into(), @@ -761,7 +604,7 @@ mod tests { #[test] 
fn test_aggregate_sum() { - init(); + init_registry(); let mut node = QueryNode::select("posts"); node.operation = QueryOperation::Aggregate; node = node.with_annotation(AggregateExpr { @@ -777,7 +620,7 @@ mod tests { #[test] fn test_group_by() { - init(); + init_registry(); let mut node = QueryNode::select("posts"); node = node .with_annotation(AggregateExpr { @@ -793,7 +636,7 @@ mod tests { #[test] fn test_having() { - init(); + init_registry(); let mut node = QueryNode::select("posts"); node.operation = QueryOperation::Select { columns: None }; node = node @@ -813,4 +656,8 @@ mod tests { let q = compile(&node).unwrap(); assert!(q.sql.contains("HAVING"), "{}", q.sql); } + + fn init_registry() { + crate::query::lookups::init_registry(); + } } diff --git a/src/query/compiler/helpers.rs b/src/query/compiler/helpers.rs new file mode 100644 index 0000000..9d039db --- /dev/null +++ b/src/query/compiler/helpers.rs @@ -0,0 +1,62 @@ +// +// ### +// Ryx — Compiler Helpers +// ### +// +// Contains internal helper functions for SQL compilation: +// - Identifier quoting (quote_col, qualified_col, split_qualified) +// - LIKE wrapping (apply_like_wrapping) +// - Other compilation utilities +// ### + +use crate::query::ast::SqlValue; + +/// Double-quote a simple identifier (column or table name). +pub fn quote_col(s: &str) -> String { + format!("\"{}\"", s.replace('"', "\"\"")) +} + +/// Handle `table.column` → `"table"."column"`, or plain column → `"column"`. +/// Also handles annotation aliases (already an expression — left as-is). +pub fn qualified_col(s: &str) -> String { + if s.contains('.') { + let (table, col) = s.split_once('.').unwrap(); + format!("{}.{}", quote_col(table), quote_col(col)) + } else { + quote_col(s) + } +} + +/// Split `"table.column"` into `("table", "column")`. +/// Returns `("", s)` if there is no dot. 
+pub fn split_qualified(s: &str) -> (String, String) { + if let Some((t, c)) = s.split_once('.') { + (t.to_string(), c.to_string()) + } else { + (String::new(), s.to_string()) + } +} + +/// Apply LIKE `%` wrapping to the value based on the lookup type. +pub fn apply_like_wrapping(lookup: &str, value: SqlValue) -> SqlValue { + match lookup { + "contains" | "icontains" => wrap_text(value, |s| format!("%{s}%")), + "startswith" | "istartswith" => wrap_text(value, |s| format!("{s}%")), + "endswith" | "iendswith" => wrap_text(value, |s| format!("%{s}")), + _ => value, + } +} + +fn wrap_text(value: SqlValue, f: impl Fn(String) -> String) -> SqlValue { + if let SqlValue::Text(s) = value { + SqlValue::Text(f(s)) + } else { + value + } +} + +/// Known transforms that can be applied in field paths +pub const KNOWN_TRANSFORMS: [&str; 16] = [ + "date", "year", "month", "day", "hour", "minute", "second", "week", "dow", "quarter", "time", + "iso_week", "iso_dow", "key", "key_text", "json", +]; diff --git a/src/query/compiler/mod.rs b/src/query/compiler/mod.rs new file mode 100644 index 0000000..e550b88 --- /dev/null +++ b/src/query/compiler/mod.rs @@ -0,0 +1,29 @@ +// +// ### +// Ryx — Compiler Module +// ### +// +// This module contains the SQL compiler that transforms QueryNode AST into SQL strings. +// The module is organized as: +// - mod.rs : Re-exports from compiler.rs +// - compiler.rs: Main implementation (compile, compile_select, etc.) +// - helpers.rs : Internal helper functions (quote_col, qualified_col, etc.) 
+// ### + +pub mod compiler; +pub mod helpers; + +// Re-export from compiler.rs +pub use compiler::compile; +pub use compiler::compile_agg_cols; +pub use compiler::compile_joins; +pub use compiler::compile_order_by; +pub use compiler::compile_q; +pub use compiler::CompiledQuery; + +// Re-export from helpers.rs +pub use helpers::apply_like_wrapping; +pub use helpers::qualified_col; +pub use helpers::quote_col; +pub use helpers::split_qualified; +pub use helpers::KNOWN_TRANSFORMS; diff --git a/src/query/lookup.rs b/src/query/lookup.rs deleted file mode 100644 index cb7da75..0000000 --- a/src/query/lookup.rs +++ /dev/null @@ -1,873 +0,0 @@ -// -// ### -// Ryx — Lookup System -// ### -// -// A "lookup" is the suffix after `__` in a filter expression. -// Examples: -// `age__gte=25` → lookup = "gte", SQL = "age >= $1" -// `name__icontains="bob"` → lookup = "icontains", SQL = "LOWER(name) LIKE LOWER($1)" -// `id__in=[1,2,3]` → lookup = "in", SQL = "id IN ($1, $2, $3)" -// -// # Extensibility design -// -// Users can register custom lookups from Python: -// -// from Ryx import register_lookup -// -// @register_lookup("uuid_prefix") -// def uuid_prefix_lookup(field: str, _value) -> str: -// return f"{field}::text LIKE ${{placeholder}}" -// -// Internally this works via a global `DashMap` that stores -// both the built-in lookups and any user-registered ones. We use DashMap -// (concurrent HashMap) so registrations from Python threads are safe. -// -// Why not a trait object (`Box`)? We need lookups to be thread-safe -// and Send+Sync since they're shared across async tasks. Function pointers -// (`fn`) are always Send+Sync, so they're stored directly in the map. -// For user-registered lookups (coming from Python callables) we store a -// Python-side callable name and call back to Python at query-build time. -// -// # SQL placeholder strategy -// -// Different databases use different placeholder syntax: -// PostgreSQL: $1, $2, $3, ... -// MySQL: ?, ?, ?, ... 
-// SQLite: ?, ?, ?, ... -// -// We abstract this by always generating `?` placeholders in the AST and -// letting the backend-specific compiler rewrite them. This is exactly what -// sqlx's `AnyPool` does internally. -// ### - -use std::collections::HashMap; -use std::sync::{OnceLock, RwLock}; - -use crate::errors::{RyxError, RyxResult}; -use crate::pool::Backend; - -// ### -// Core types -// ### - -/// Context passed to every lookup function when building a SQL fragment. -/// -/// The lookup function receives the column name and must return a SQL fragment -/// with `?` as the value placeholder. It does NOT need to know the placeholder -/// index — the compiler handles numbering. -/// -/// # Example (for the "gte" lookup) -/// ``` -/// // field = "age", returns: "age >= ?" -/// fn gte_lookup(ctx: &LookupContext) -> String { -/// format!("{} >= ?", ctx.column) -/// } -/// ``` -#[derive(Debug, Clone)] -pub struct LookupContext { - /// The SQL column name, already quoted/escaped. - pub column: String, - - /// Whether the lookup is negated (i.e., inside an `exclude()` call). - /// Most lookups ignore this — negation is applied by the compiler. - pub negated: bool, - - /// The database backend (PostgreSQL, MySQL, SQLite). - /// Used for backend-specific SQL generation. - pub backend: Backend, - - /// For JSON key transforms (e.g., bio__key__priority), this holds the key name ("priority") - /// Used by apply_transform() to generate correct JSON path accessors. - pub json_key: Option, -} - -/// The function signature for a built-in lookup implementation. -/// -/// Takes a `LookupContext` and returns a SQL fragment string. -/// The function must be `fn` (not closure) to be `Send + Sync`. -pub type LookupFn = fn(&LookupContext) -> String; - -/// A lookup that was registered from Python: stores the callable and a -/// Rust-generated SQL template where `{col}` is the column placeholder. 
-/// -/// Python-registered lookups are called at SQL-build time with the column -/// name substituted in. This avoids holding the GIL for every query. -/// The Python callable is only invoked once at registration time to extract -/// the SQL template string. -#[derive(Debug, Clone)] -pub struct PythonLookup { - /// Pre-rendered SQL template. Example: `"LOWER({col}) LIKE LOWER(?)"` - /// The caller substitutes `{col}` with the actual column name. - pub sql_template: String, -} - -// -// Global lookup registry -// -/// The two registries live side-by-side: -/// - `builtin`: populated once at startup with the built-in lookups -/// - `custom`: populated at runtime with user-registered lookups -/// -/// We check `custom` first so users can override built-ins (e.g., to change -/// the SQL generated by `icontains` for a database that has native ILIKE). -struct LookupRegistry { - builtin: HashMap<&'static str, LookupFn>, - custom: HashMap, -} - -static REGISTRY: OnceLock> = OnceLock::new(); - -/// Initialize the registry with all built-in lookups. -/// Called once from `lib.rs` module initialization. -pub fn init_registry() { - REGISTRY.get_or_init(|| { - let mut builtin = HashMap::new(); - - // Comparison lookups - builtin.insert("exact", exact as LookupFn); - builtin.insert("gt", gt as LookupFn); - builtin.insert("gte", gte as LookupFn); - builtin.insert("lt", lt as LookupFn); - builtin.insert("lte", lte as LookupFn); - - // String lookups - builtin.insert("contains", contains as LookupFn); - builtin.insert("icontains", icontains as LookupFn); - builtin.insert("startswith", startswith as LookupFn); - builtin.insert("istartswith", istartswith as LookupFn); - builtin.insert("endswith", endswith as LookupFn); - builtin.insert("iendswith", iendswith as LookupFn); - - // Null lookups - // `isnull` is special: it ignores the value entirely and produces - // IS NULL / IS NOT NULL. The value passed (True/False) is read by - // the compiler, not by this function. 
- builtin.insert("isnull", isnull as LookupFn); - - // Membership lookups - // `in` is also special: the compiler expands it into - // `col IN (?, ?, ?)` based on the number of values provided. - builtin.insert("in", in_lookup as LookupFn); - - // Range lookup - builtin.insert("range", range as LookupFn); - - // Date/Time transforms (for chaining like created_at__date__gte) - // These are registered as lookups that return SQL fragments - builtin.insert("date", date_transform as LookupFn); - builtin.insert("year", year_transform as LookupFn); - builtin.insert("month", month_transform as LookupFn); - builtin.insert("day", day_transform as LookupFn); - builtin.insert("hour", hour_transform as LookupFn); - builtin.insert("minute", minute_transform as LookupFn); - builtin.insert("second", second_transform as LookupFn); - builtin.insert("week", week_transform as LookupFn); - builtin.insert("dow", dow_transform as LookupFn); - // New transforms - builtin.insert("quarter", quarter_transform as LookupFn); - builtin.insert("time", time_transform as LookupFn); - builtin.insert("iso_week", iso_week_transform as LookupFn); - builtin.insert("iso_dow", iso_dow_transform as LookupFn); - - // JSON transforms (for chaining like metadata__key__icontains) - builtin.insert("key", json_key_transform as LookupFn); - builtin.insert("key_text", json_key_text_transform as LookupFn); - builtin.insert("json", json_cast_transform as LookupFn); - - // JSON lookups (comparison operators) - builtin.insert("has_key", json_has_key as LookupFn); - builtin.insert("has_keys", json_has_keys as LookupFn); - builtin.insert("contains", json_contains as LookupFn); - builtin.insert("contained_by", json_contained_by as LookupFn); - - RwLock::new(LookupRegistry { - builtin, - custom: HashMap::new(), - }) - }); -} - -// -// Registry public API -// -/// Register a custom lookup from Python. -/// -/// # Arguments -/// * `name` — the lookup name (e.g. 
`"uuid_prefix"`) -/// * `sql_template` — SQL fragment with `{col}` as column placeholder and -/// `?` as value placeholder. Example: `"{col}::text LIKE ?"` -/// -/// # Errors -/// Returns `RyxError::Internal` if the registry hasn't been initialized -/// (should never happen in practice since `init_registry()` runs at import). -pub fn register_custom(name: impl Into, sql_template: impl Into) -> RyxResult<()> { - let registry = REGISTRY - .get() - .ok_or_else(|| RyxError::Internal("Lookup registry not initialized".into()))?; - - let mut guard = registry - .write() - .map_err(|e| RyxError::Internal(format!("Registry lock poisoned: {e}")))?; - - guard.custom.insert( - name.into(), - PythonLookup { - sql_template: sql_template.into(), - }, - ); - - Ok(()) -} - -// ### -// Chained lookups support (e.g., "date__gte", "year__month") -// ### - -/// Handle SQLite transform lookup when ctx.column already has transform applied -/// This happens when compiler applied the transform but lookup is still simple (e.g., "gte") -#[allow(dead_code)] -fn handle_sqlite_transform_lookup( - field: &str, - _transform: &str, - lookup_name: &str, - ctx: &LookupContext, -) -> RyxResult { - // Check if we need to convert TEXT to INTEGER for numeric comparisons - let is_numeric_comparison = matches!(lookup_name, "gt" | "gte" | "lt" | "lte" | "exact"); - - if is_numeric_comparison && ctx.column.contains("AS TEXT)") { - // Convert TEXT to INTEGER - let transformed = ctx.column.replace("AS TEXT)", "AS INTEGER)"); - let new_ctx = LookupContext { - column: transformed, - negated: ctx.negated, - backend: ctx.backend, - json_key: ctx.json_key.clone(), - }; - return resolve_simple(field, lookup_name, &new_ctx); - } - - // Otherwise, use as-is - resolve_simple(field, lookup_name, ctx) -} - -/// Resolve a chained lookup like "date__gte" or "year__exact". -/// This applies transforms first (date, year, month, etc.) then the final lookup. 
-pub fn resolve(field: &str, lookup_name: &str, ctx: &LookupContext) -> RyxResult { - // If no "__", it's a simple lookup - if !lookup_name.contains("__") { - // Check if we have a JSON key that needs to be applied - if ctx.json_key.is_some() { - // We have a JSON key transform to apply - ALWAYS start fresh from field - let mut column = format!("\"{}\"", field); - // Apply the key transform with the json_key - column = apply_transform("key", &column, ctx.backend, ctx.json_key.as_deref())?; - - // Build new context with transformed column - let json_ctx = LookupContext { - column: column.clone(), - negated: ctx.negated, - backend: ctx.backend, - json_key: None, - }; - return resolve_simple(field, lookup_name, &json_ctx); - } - - // Check if ctx.column already has a date/time transform applied (e.g., from compiler) - // Handle the case where compiler applied transform but lookup is simple (e.g., "gte") - if ctx.column.contains("strftime") || ctx.column.contains("DATE(") { - // Detect transform type from SQL - if ctx.column.contains("strftime('%Y'") { - return handle_sqlite_transform_lookup(field, "year", lookup_name, ctx); - } else if ctx.column.contains("strftime('%m'") { - return handle_sqlite_transform_lookup(field, "month", lookup_name, ctx); - } else if ctx.column.contains("strftime('%d'") { - return handle_sqlite_transform_lookup(field, "day", lookup_name, ctx); - } else if ctx.column.contains("strftime('%H'") { - return handle_sqlite_transform_lookup(field, "hour", lookup_name, ctx); - } - // For DATE() transform, we need different handling for comparisons - if ctx.column.starts_with("DATE(") { - return resolve_simple(field, lookup_name, ctx); - } - } - return resolve_simple(field, lookup_name, ctx); - } - - // Chained: split into transforms + final lookup - let parts: Vec<&str> = lookup_name.split("__").collect(); - let final_lookup = *parts.last().unwrap(); - let transform_parts: Vec<&str> = parts[..parts.len() - 1].to_vec(); - - // Start fresh from the base 
column - don't use ctx.column which may already have transforms - let mut column = format!("\"{}\"", field); - - // Apply transforms in order until we hit a lookup - // For JSON transforms like "key", use ctx.json_key if available - for transform in transform_parts.iter() { - // Check if this is a known transform - let is_transform = matches!( - *transform, - "date" - | "year" - | "month" - | "day" - | "hour" - | "minute" - | "second" - | "week" - | "dow" - | "quarter" - | "time" - | "iso_week" - | "iso_dow" - | "key" - | "key_text" - | "json" - ); - - if is_transform { - // For JSON transforms (key, key_text), use json_key from context if available - let key = if matches!(*transform, "key" | "key_text") { - ctx.json_key - .as_deref() - .or_else(|| field.rsplit("__").next()) - } else { - None - }; - column = apply_transform(transform, &column, ctx.backend, key)?; - } else { - // This part is a lookup, not a transform - stop here - break; - } - } - - // Build a new context with the transformed column - let final_ctx = LookupContext { - column: column.clone(), - negated: ctx.negated, - backend: ctx.backend, - json_key: ctx.json_key.clone(), - }; - - // For SQLite, handle type conversion for comparisons on transformed values - if ctx.backend == Backend::SQLite { - // Check if the column contains a date/time transform - let col_has_transform = column.contains("strftime"); - - if col_has_transform && !column.contains("AS INTEGER") { - // Column is TEXT from a transform, need to convert for numeric comparisons - let is_numeric_comparison = - matches!(final_lookup, "gt" | "gte" | "lt" | "lte" | "exact"); - - if is_numeric_comparison { - // Convert TEXT to INTEGER by replacing AS TEXT with AS INTEGER - let transformed = column.replace("AS TEXT)", "AS INTEGER)"); - let final_ctx_int = LookupContext { - column: transformed, - negated: ctx.negated, - backend: ctx.backend, - json_key: ctx.json_key.clone(), - }; - return resolve_simple(field, final_lookup, &final_ctx_int); - } 
- - // For non-numeric comparisons, cast the bind value - let fragment = resolve_simple(field, final_lookup, &final_ctx)?; - return Ok(add_sqlite_cast_for_transform(&fragment, final_lookup)); - } - } - - // Default: resolve normally - resolve_simple(field, final_lookup, &final_ctx) -} - -#[allow(dead_code)] -/// Convert a SQLite transform expression from TEXT to INTEGER for numeric comparisons -fn convert_transform_to_integer(column: &str) -> String { - // Replace CAST(...AS TEXT) with CAST(...AS INTEGER) - column.replace("AS TEXT)", "AS INTEGER)") -} - -/// Add CAST(? AS TEXT) for SQLite date/time transform comparisons -fn add_sqlite_cast_for_transform(fragment: &str, lookup: &str) -> String { - // For lookups that use = ?, replace = ? with = CAST(? AS TEXT) - // For lookups that use > ?, etc., replace with > CAST(? AS TEXT) - - match lookup { - "exact" => fragment.replace("= ?", "= CAST(? AS TEXT)"), - "gt" => fragment.replace("> ?", "> CAST(? AS TEXT)"), - "gte" => fragment.replace(">= ?", ">= CAST(? AS TEXT)"), - "lt" => fragment.replace("< ?", "< CAST(? AS TEXT)"), - "lte" => fragment.replace("<= ?", "<= CAST(? AS TEXT)"), - _ => fragment.to_string(), - } -} - -/// Resolve a simple (non-chained) lookup. 
-fn resolve_simple(field: &str, lookup_name: &str, ctx: &LookupContext) -> RyxResult { - let registry = REGISTRY - .get() - .ok_or_else(|| RyxError::Internal("Lookup registry not initialized".into()))?; - - let guard = registry - .read() - .map_err(|e| RyxError::Internal(format!("Registry lock poisoned: {e}")))?; - - // Check custom registry first (allows overriding built-ins) - if let Some(custom) = guard.custom.get(lookup_name) { - return Ok(custom.sql_template.replace("{col}", &ctx.column)); - } - - // Fall back to built-in lookup functions - if let Some(lookup_fn) = guard.builtin.get(lookup_name) { - return Ok(lookup_fn(ctx)); - } - - Err(RyxError::UnknownLookup { - field: field.to_string(), - lookup: lookup_name.to_string(), - }) -} - -/// Returns the list of all registered lookup names (built-in + custom). -/// Used by the Python layer to provide helpful error messages and IDE -/// autocompletion support. -pub fn registered_lookups() -> RyxResult> { - let registry = REGISTRY - .get() - .ok_or_else(|| RyxError::Internal("Lookup registry not initialized".into()))?; - - let guard = registry - .read() - .map_err(|e| RyxError::Internal(format!("Registry lock poisoned: {e}")))?; - - let mut names: Vec = guard - .builtin - .keys() - .map(|k| k.to_string()) - .chain(guard.custom.keys().cloned()) - .collect(); - names.sort(); - Ok(names) -} - -/// Apply a field transformation (date, year, month, key, etc.) 
-/// Returns SQL like "DATE(col)" or "EXTRACT(YEAR FROM col)" -/// For JSON transforms (key, key_text), the key is extracted from the next part of the chain -pub fn apply_transform( - name: &str, - column: &str, - backend: Backend, - key: Option<&str>, -) -> RyxResult { - let sql = match (name, backend) { - // Date/Time transforms - ("date", _) => format!("DATE({})", column), - - ("year", Backend::PostgreSQL) => format!("EXTRACT(YEAR FROM {})", column), - ("year", Backend::MySQL) => format!("YEAR({})", column), - ("year", Backend::SQLite) => format!("CAST(strftime('%Y', {}) AS TEXT)", column), - - ("month", Backend::PostgreSQL) => format!("EXTRACT(MONTH FROM {})", column), - ("month", Backend::MySQL) => format!("MONTH({})", column), - ("month", Backend::SQLite) => format!("CAST(strftime('%m', {}) AS TEXT)", column), - - ("day", Backend::PostgreSQL) => format!("EXTRACT(DAY FROM {})", column), - ("day", Backend::MySQL) => format!("DAYOFMONTH({})", column), - ("day", Backend::SQLite) => format!("CAST(strftime('%d', {}) AS TEXT)", column), - - ("hour", Backend::PostgreSQL) => format!("EXTRACT(HOUR FROM {})", column), - ("hour", Backend::MySQL) => format!("HOUR({})", column), - ("hour", Backend::SQLite) => format!("CAST(strftime('%H', {}) AS TEXT)", column), - - ("minute", Backend::PostgreSQL) => format!("EXTRACT(MINUTE FROM {})", column), - ("minute", Backend::MySQL) => format!("MINUTE({})", column), - ("minute", Backend::SQLite) => format!("CAST(strftime('%M', {}) AS TEXT)", column), - - ("second", Backend::PostgreSQL) => format!("EXTRACT(SECOND FROM {})", column), - ("second", Backend::MySQL) => format!("SECOND({})", column), - ("second", Backend::SQLite) => format!("CAST(strftime('%S', {}) AS TEXT)", column), - - ("week", Backend::PostgreSQL) => format!("EXTRACT(WEEK FROM {})", column), - ("week", Backend::MySQL) => format!("WEEK({})", column), - ("week", Backend::SQLite) => format!("CAST(strftime('%W', {}) AS TEXT)", column), - - ("dow", Backend::PostgreSQL) => 
format!("EXTRACT(DOW FROM {})", column), - ("dow", Backend::MySQL) => format!("DAYOFWEEK({})", column), - ("dow", Backend::SQLite) => format!("CAST(strftime('%w', {}) AS TEXT)", column), - - // New Date/Time transforms - ("quarter", Backend::PostgreSQL) => format!("EXTRACT(QUARTER FROM {})", column), - ("quarter", Backend::MySQL) => format!("QUARTER({})", column), - ("quarter", Backend::SQLite) => format!( - "CAST((CAST(strftime('%m', {}) AS INTEGER) + 2) / 3 AS TEXT)", - column - ), - - ("time", Backend::PostgreSQL) => format!("TIME({})", column), - ("time", Backend::MySQL) => format!("TIME({})", column), - ("time", Backend::SQLite) => format!("time({})", column), - - ("iso_week", Backend::PostgreSQL) => format!("EXTRACT(ISOWEEK FROM {})", column), - ("iso_week", Backend::MySQL) => format!( - "WEEK({}, 1) - WEEK(DATE_SUB({}, INTERVAL (DAYOFWEEK({}) - 1) DAY), 0) + 1", - column, column, column - ), - ("iso_week", Backend::SQLite) => format!("CAST(strftime('%W', {}) AS TEXT)", column), - - ("iso_dow", Backend::PostgreSQL) => format!("EXTRACT(ISODOW FROM {})", column), - ("iso_dow", Backend::MySQL) => format!("((DAYOFWEEK({}) + 5) % 7) + 1", column), - ("iso_dow", Backend::SQLite) => format!("CAST(strftime('%w', {}) AS TEXT)", column), - - // JSON transforms (key extraction) - key comes from the next part of the chain - ("key", Backend::PostgreSQL) => { - let k = key.unwrap_or("key"); - format!("({}->>'{}')", column, k) - } - ("key", Backend::MySQL) => { - let k = key.unwrap_or("key"); - format!("JSON_UNQUOTE(JSON_EXTRACT({}, '$.{}'))", column, k) - } - ("key", Backend::SQLite) => { - let k = key.unwrap_or("key"); - format!("json_extract({}, '$.{}')", column, k) - } - - ("key_text", Backend::PostgreSQL) => { - let k = key.unwrap_or("key"); - format!("({}->>'{}')::text", column, k) - } - ("key_text", Backend::MySQL) => { - let k = key.unwrap_or("key"); - format!( - "CAST(JSON_UNQUOTE(JSON_EXTRACT({}, '.{}')) AS CHAR)", - column, k - ) - } - ("key_text", 
Backend::SQLite) => { - let k = key.unwrap_or("key"); - format!("CAST(json_extract({}, '.{}') AS TEXT)", column, k) - } - - ("json", Backend::PostgreSQL) => format!("({}::jsonb)", column), - ("json", Backend::MySQL) => column.to_string(), - ("json", Backend::SQLite) => column.to_string(), - - _ => { - return Err(RyxError::UnknownLookup { - field: column.to_string(), - lookup: name.to_string(), - }) - } - }; - - Ok(sql) -} - -// ### -// Built-in lookup implementations -// -// Each function takes a `LookupContext` and returns a SQL fragment. -// Rules: -// - Always use `?` as the value placeholder -// - Never include the value itself (SQL injection prevention) -// - Column name is already safely quoted by the query builder -// ### - -/// `field__exact=value` → `field = ?` -/// -/// This is also the *implicit* lookup: `filter(name="Alice")` is equivalent -/// to `filter(name__exact="Alice")`. -fn exact(ctx: &LookupContext) -> String { - format!("{} = ?", ctx.column) -} - -/// `field__gt=value` → `field > ?` -fn gt(ctx: &LookupContext) -> String { - format!("{} > ?", ctx.column) -} - -/// `field__gte=value` → `field >= ?` -fn gte(ctx: &LookupContext) -> String { - format!("{} >= ?", ctx.column) -} - -/// `field__lt=value` → `field < ?` -fn lt(ctx: &LookupContext) -> String { - format!("{} < ?", ctx.column) -} - -/// `field__lte=value` → `field <= ?` -fn lte(ctx: &LookupContext) -> String { - format!("{} <= ?", ctx.column) -} - -/// `field__contains="bob"` → `field LIKE ?` (with `%value%` at bind time) -/// -/// Case-sensitive substring match. The `%` wrapping is applied by the -/// executor when binding the value, not in the SQL fragment itself. -fn contains(ctx: &LookupContext) -> String { - format!("{} LIKE ?", ctx.column) -} - -/// `field__icontains="bob"` → `LOWER(field) LIKE LOWER(?)` -/// -/// Case-insensitive substring match. Works on all backends without relying -/// on PostgreSQL-specific `ILIKE`. The `%value%` wrapping happens at bind time. 
-fn icontains(ctx: &LookupContext) -> String { - format!("LOWER({}) LIKE LOWER(?)", ctx.column) -} - -/// `field__startswith="pr"` → `field LIKE ?` (with `value%` at bind time) -fn startswith(ctx: &LookupContext) -> String { - format!("{} LIKE ?", ctx.column) -} - -/// `field__istartswith="pr"` → `LOWER(field) LIKE LOWER(?)` -fn istartswith(ctx: &LookupContext) -> String { - format!("LOWER({}) LIKE LOWER(?)", ctx.column) -} - -/// `field__endswith="ing"` → `field LIKE ?` (with `%value` at bind time) -fn endswith(ctx: &LookupContext) -> String { - format!("{} LIKE ?", ctx.column) -} - -/// `field__iendswith="ing"` → `LOWER(field) LIKE LOWER(?)` -fn iendswith(ctx: &LookupContext) -> String { - format!("LOWER({}) LIKE LOWER(?)", ctx.column) -} - -/// `field__isnull=True` → `field IS NULL` -/// `field__isnull=False` → `field IS NOT NULL` -/// -/// Note: the True/False distinction is handled by the compiler which reads the -/// bound value. This function always returns the IS NULL form; the compiler -/// swaps to IS NOT NULL when the value is False/0. -fn isnull(ctx: &LookupContext) -> String { - // The compiler reads the Python boolean and rewrites this. - // We return the base form here. - format!("{} IS NULL", ctx.column) -} - -/// `field__in=[1, 2, 3]` → `field IN (?, ?, ?)` -/// -/// Note: this returns a *template* — the compiler replaces `(?)` with -/// the correct number of placeholders based on the list length. -fn in_lookup(ctx: &LookupContext) -> String { - // Single `?` — compiler expands to `(?, ?, ...)` based on value count - format!("{} IN (?)", ctx.column) -} - -/// `field__range=(low, high)` → `field BETWEEN ? AND ?` -/// -/// Uses two bind parameters. The compiler handles this specially. -fn range(ctx: &LookupContext) -> String { - format!("{} BETWEEN ? 
AND ?", ctx.column) -} - -// ### -// Date/Time Transform Functions (for chained lookups) -// ### - -/// `field__date` → `DATE(field)` (backend-aware) - implicit equality -pub fn date_transform(ctx: &LookupContext) -> String { - match ctx.backend { - Backend::PostgreSQL => format!("DATE({}) = ?", ctx.column), - Backend::MySQL => format!("DATE({}) = ?", ctx.column), - Backend::SQLite => format!("date({}) = CAST(? AS TEXT)", ctx.column), - } -} - -/// `field__year` → `EXTRACT(YEAR FROM field)` or `YEAR(field)` (backend-aware) - implicit equality -pub fn year_transform(ctx: &LookupContext) -> String { - match ctx.backend { - Backend::PostgreSQL => format!("EXTRACT(YEAR FROM {}) = ?", ctx.column), - Backend::MySQL => format!("YEAR({}) = ?", ctx.column), - Backend::SQLite => format!("CAST(strftime('%Y', {}) AS INTEGER) = ?", ctx.column), - } -} - -/// `field__month` → `EXTRACT(MONTH FROM field)` or `MONTH(field)` (backend-aware) - implicit equality -pub fn month_transform(ctx: &LookupContext) -> String { - match ctx.backend { - Backend::PostgreSQL => format!("EXTRACT(MONTH FROM {}) = ?", ctx.column), - Backend::MySQL => format!("MONTH({}) = ?", ctx.column), - Backend::SQLite => format!("CAST(strftime('%m', {}) AS INTEGER) = ?", ctx.column), - } -} - -/// `field__day` → `EXTRACT(DAY FROM field)` or `DAY(field)` (backend-aware) - implicit equality -pub fn day_transform(ctx: &LookupContext) -> String { - match ctx.backend { - Backend::PostgreSQL => format!("EXTRACT(DAY FROM {}) = ?", ctx.column), - Backend::MySQL => format!("DAYOFMONTH({}) = ?", ctx.column), - Backend::SQLite => format!("CAST(strftime('%d', {}) AS INTEGER) = ?", ctx.column), - } -} - -/// `field__hour` → `EXTRACT(HOUR FROM field)` or `HOUR(field)` (backend-aware) - implicit equality -pub fn hour_transform(ctx: &LookupContext) -> String { - match ctx.backend { - Backend::PostgreSQL => format!("EXTRACT(HOUR FROM {}) = ?", ctx.column), - Backend::MySQL => format!("HOUR({}) = ?", ctx.column), - Backend::SQLite 
=> format!("CAST(strftime('%H', {}) AS INTEGER) = ?", ctx.column), - } -} - -/// `field__minute` → `EXTRACT(MINUTE FROM field)` or `MINUTE(field)` (backend-aware) - implicit equality -pub fn minute_transform(ctx: &LookupContext) -> String { - match ctx.backend { - Backend::PostgreSQL => format!("EXTRACT(MINUTE FROM {}) = ?", ctx.column), - Backend::MySQL => format!("MINUTE({}) = ?", ctx.column), - Backend::SQLite => format!("CAST(strftime('%M', {}) AS INTEGER) = ?", ctx.column), - } -} - -/// `field__second` → `EXTRACT(SECOND FROM field)` or `SECOND(field)` (backend-aware) - implicit equality -pub fn second_transform(ctx: &LookupContext) -> String { - match ctx.backend { - Backend::PostgreSQL => format!("EXTRACT(SECOND FROM {}) = ?", ctx.column), - Backend::MySQL => format!("SECOND({}) = ?", ctx.column), - Backend::SQLite => format!("CAST(strftime('%S', {}) AS INTEGER) = ?", ctx.column), - } -} - -/// `field__week` → `EXTRACT(WEEK FROM field)` or `WEEK(field)` (backend-aware) - implicit equality -pub fn week_transform(ctx: &LookupContext) -> String { - match ctx.backend { - Backend::PostgreSQL => format!("EXTRACT(WEEK FROM {}) = ?", ctx.column), - Backend::MySQL => format!("WEEK({}) = ?", ctx.column), - Backend::SQLite => format!("CAST(strftime('%W', {}) AS INTEGER) = ?", ctx.column), - } -} - -/// `field__dow` → `EXTRACT(DOW FROM field)` or `DAYOFWEEK(field)` (backend-aware) - implicit equality -pub fn dow_transform(ctx: &LookupContext) -> String { - match ctx.backend { - Backend::PostgreSQL => format!("EXTRACT(DOW FROM {}) = ?", ctx.column), - Backend::MySQL => format!("DAYOFWEEK({}) = ?", ctx.column), - Backend::SQLite => format!("CAST(strftime('%w', {}) AS INTEGER) = ?", ctx.column), - } -} - -/// `field__quarter` → `EXTRACT(QUARTER FROM field)` or `QUARTER(field)` (backend-aware) - implicit equality -pub fn quarter_transform(ctx: &LookupContext) -> String { - match ctx.backend { - Backend::PostgreSQL => format!("EXTRACT(QUARTER FROM {}) = ?", ctx.column), - 
Backend::MySQL => format!("QUARTER({}) = ?", ctx.column), - Backend::SQLite => format!( - "((CAST(strftime('%m', {}) AS INTEGER) + 2) / 3) = ?", - ctx.column - ), - } -} - -/// `field__time` → `TIME(field)` or equivalent (backend-aware) - implicit equality -pub fn time_transform(ctx: &LookupContext) -> String { - match ctx.backend { - Backend::PostgreSQL => format!("TIME({}) = ?", ctx.column), - Backend::MySQL => format!("TIME({}) = ?", ctx.column), - Backend::SQLite => format!("time({}) = ?", ctx.column), - } -} - -/// `field__iso_week` → `EXTRACT(ISOWEEK FROM field)` or equivalent (backend-aware) - implicit equality -pub fn iso_week_transform(ctx: &LookupContext) -> String { - match ctx.backend { - Backend::PostgreSQL => format!("EXTRACT(ISOWEEK FROM {}) = ?", ctx.column), - Backend::MySQL => format!( - "WEEK({}, 1) - WEEK(DATE_SUB({}, INTERVAL (DAYOFWEEK({}) - 1) DAY), 0) + 1 = ?", - ctx.column, ctx.column, ctx.column - ), - Backend::SQLite => format!("CAST(strftime('%W', {}) AS INTEGER) = ?", ctx.column), - } -} - -/// `field__iso_dow` → `EXTRACT(ISODOW FROM field)` or equivalent (backend-aware) - implicit equality -pub fn iso_dow_transform(ctx: &LookupContext) -> String { - match ctx.backend { - Backend::PostgreSQL => format!("EXTRACT(ISODOW FROM {}) = ?", ctx.column), - Backend::MySQL => format!("((DAYOFWEEK({}) + 5) % 7) + 1 = ?", ctx.column), - Backend::SQLite => format!("CAST(strftime('%w', {}) AS INTEGER) = ?", ctx.column), - } -} - -// ### -// JSON Transform Functions (for chained lookups) -// ### - -/// `field__key` → `(field->>'key')` or `JSON_UNQUOTE(JSON_EXTRACT(field, '$.key'))` -pub fn json_key_transform(ctx: &LookupContext) -> String { - match ctx.backend { - Backend::PostgreSQL => format!("({}->>'key')", ctx.column), - Backend::MySQL => format!("JSON_UNQUOTE(JSON_EXTRACT({}, '$.key'))", ctx.column), - Backend::SQLite => format!("json_extract({}, '$.key')", ctx.column), - } -} - -/// `field__key_text` → `(field->>'key')::text` (for text 
comparisons like icontains) -pub fn json_key_text_transform(ctx: &LookupContext) -> String { - match ctx.backend { - Backend::PostgreSQL => format!("({}->>'key')::text", ctx.column), - Backend::MySQL => format!( - "CAST(JSON_UNQUOTE(JSON_EXTRACT({}, '$.key')) AS CHAR)", - ctx.column - ), - Backend::SQLite => format!("CAST(json_extract({}, '$.key') AS TEXT)", ctx.column), - } -} - -/// `field__json` → `field::jsonb` (PostgreSQL) or just field (MySQL/SQLite) -pub fn json_cast_transform(ctx: &LookupContext) -> String { - match ctx.backend { - Backend::PostgreSQL => format!("({}::jsonb)", ctx.column), - Backend::MySQL => ctx.column.clone(), - Backend::SQLite => ctx.column.clone(), - } -} - -// ### -// JSON Lookup Functions (comparison operators) -// ### - -/// `field__has_key="key"` → `field ? 'key'` (PostgreSQL) or `JSON_CONTAINS(field, '"key"')` (MySQL) -fn json_has_key(ctx: &LookupContext) -> String { - match ctx.backend { - Backend::PostgreSQL => format!("({} ? 'key')", ctx.column), - Backend::MySQL => format!("JSON_CONTAINS({}, '\"key\"')", ctx.column), - Backend::SQLite => format!("json_extract({}, '$.key') IS NOT NULL", ctx.column), - } -} - -/// `field__has_keys=['key1', 'key2']` → `field ?& array['key1', 'key2']` -fn json_has_keys(ctx: &LookupContext) -> String { - match ctx.backend { - Backend::PostgreSQL => format!("({} ?& array['key1', 'key2'])", ctx.column), - Backend::MySQL => format!("JSON_CONTAINS({}, '[\"key1\", \"key2\"]')", ctx.column), - Backend::SQLite => format!( - "json_extract({}, '$.key1') IS NOT NULL AND json_extract({}, '$.key2') IS NOT NULL", - ctx.column, ctx.column - ), - } -} - -/// `field__contains={"key": "value"}` → `field @> ?` (PostgreSQL) -fn json_contains(ctx: &LookupContext) -> String { - match ctx.backend { - Backend::PostgreSQL => format!("({} @> ?)", ctx.column), - Backend::MySQL => format!("JSON_CONTAINS({}, ?)", ctx.column), - Backend::SQLite => ctx.column.clone(), // Limited support in SQLite - } -} - -/// 
`field__contained_by={"key": "value"}` → `field <@ ?` (PostgreSQL) -fn json_contained_by(ctx: &LookupContext) -> String { - match ctx.backend { - Backend::PostgreSQL => format!("({} <@ ?)", ctx.column), - Backend::MySQL => format!("JSON_CONTAINS(?, {})", ctx.column), - Backend::SQLite => ctx.column.clone(), // Limited support in SQLite - } -} diff --git a/src/query/lookups/common_lookups.rs b/src/query/lookups/common_lookups.rs new file mode 100644 index 0000000..880d2b1 --- /dev/null +++ b/src/query/lookups/common_lookups.rs @@ -0,0 +1,101 @@ +// +// ### +// Ryx — Common Lookups +// ### +// +// Contains comparison and string lookups (exact, gt, contains, etc.) +// ### + +use crate::query::lookups::LookupContext; + +pub use crate::query::lookups::LookupFn; +pub use crate::query::lookups::PythonLookup; + +/// `field__exact=value` → `field = ?` +/// +/// This is also the *implicit* lookup: `filter(name="Alice")` is equivalent +/// to `filter(name__exact="Alice")`. +pub fn exact(ctx: &LookupContext) -> String { + format!("{} = ?", ctx.column) +} + +/// `field__gt=value` → `field > ?` +pub fn gt(ctx: &LookupContext) -> String { + format!("{} > ?", ctx.column) +} + +/// `field__gte=value` → `field >= ?` +pub fn gte(ctx: &LookupContext) -> String { + format!("{} >= ?", ctx.column) +} + +/// `field__lt=value` → `field < ?` +pub fn lt(ctx: &LookupContext) -> String { + format!("{} < ?", ctx.column) +} + +/// `field__lte=value` → `field <= ?` +pub fn lte(ctx: &LookupContext) -> String { + format!("{} <= ?", ctx.column) +} + +/// `field__contains="bob"` → `field LIKE ?` (with `%value%` at bind time) +/// +/// Case-sensitive substring match. The `%` wrapping is applied by the +/// executor when binding the value, not in the SQL fragment itself. +pub fn contains(ctx: &LookupContext) -> String { + format!("{} LIKE ?", ctx.column) +} + +/// `field__icontains="bob"` → `LOWER(field) LIKE LOWER(?)` +/// +/// Case-insensitive substring match. 
Works on all backends without relying +/// on PostgreSQL-specific `ILIKE`. The `%value%` wrapping happens at bind time. +pub fn icontains(ctx: &LookupContext) -> String { + format!("LOWER({}) LIKE LOWER(?)", ctx.column) +} + +/// `field__startswith="pr"` → `field LIKE ?` (with `value%` at bind time) +pub fn startswith(ctx: &LookupContext) -> String { + format!("{} LIKE ?", ctx.column) +} + +/// `field__istartswith="pr"` → `LOWER(field) LIKE LOWER(?)` +pub fn istartswith(ctx: &LookupContext) -> String { + format!("LOWER({}) LIKE LOWER(?)", ctx.column) +} + +/// `field__endswith="ing"` → `field LIKE ?` (with `%value` at bind time) +pub fn endswith(ctx: &LookupContext) -> String { + format!("{} LIKE ?", ctx.column) +} + +/// `field__iendswith="ing"` → `LOWER(field) LIKE LOWER(?)` +pub fn iendswith(ctx: &LookupContext) -> String { + format!("LOWER({}) LIKE LOWER(?)", ctx.column) +} + +/// `field__isnull=True` → `field IS NULL` +/// `field__isnull=False` → `field IS NOT NULL` +/// +/// Note: the True/False distinction is handled by the compiler which reads the +/// bound value. This function always returns the IS NULL form; the compiler +/// swaps to IS NOT NULL when the value is False/0. +pub fn isnull(ctx: &LookupContext) -> String { + format!("{} IS NULL", ctx.column) +} + +/// `field__in=[1, 2, 3]` → `field IN (?, ?, ?)` +/// +/// Note: this returns a *template* — the compiler replaces `(?)` with +/// the correct number of placeholders based on the list length. +pub fn in_lookup(ctx: &LookupContext) -> String { + format!("{} IN (?)", ctx.column) +} + +/// `field__range=(low, high)` → `field BETWEEN ? AND ?` +/// +/// Uses two bind parameters. The compiler handles this specially. +pub fn range(ctx: &LookupContext) -> String { + format!("{} BETWEEN ? 
AND ?", ctx.column) +} diff --git a/src/query/lookups/date_lookups.rs b/src/query/lookups/date_lookups.rs new file mode 100644 index 0000000..323c4d8 --- /dev/null +++ b/src/query/lookups/date_lookups.rs @@ -0,0 +1,201 @@ +// +// ### +// Ryx — Date/Time Lookups +// ### +// +// Contains date/time transforms (year, month, day, hour, etc.) and apply_transform logic. +// These are used for chained lookups like `created_at__year__gte=2024` +// ### + +use crate::pool::Backend; +use crate::query::lookups::LookupContext; + +pub use crate::query::lookups::LookupFn; + +/// Apply a date/time field transformation. +/// Returns SQL like "DATE(col)" or "EXTRACT(YEAR FROM col)" +pub fn apply_date_transform(name: &str, column: &str, backend: Backend) -> Option { + let sql = match (name, backend) { + ("date", _) => format!("DATE({})", column), + + ("year", Backend::PostgreSQL) => format!("EXTRACT(YEAR FROM {})", column), + ("year", Backend::MySQL) => format!("YEAR({})", column), + ("year", Backend::SQLite) => format!("CAST(strftime('%Y', {}) AS TEXT)", column), + + ("month", Backend::PostgreSQL) => format!("EXTRACT(MONTH FROM {})", column), + ("month", Backend::MySQL) => format!("MONTH({})", column), + ("month", Backend::SQLite) => format!("CAST(strftime('%m', {}) AS TEXT)", column), + + ("day", Backend::PostgreSQL) => format!("EXTRACT(DAY FROM {})", column), + ("day", Backend::MySQL) => format!("DAYOFMONTH({})", column), + ("day", Backend::SQLite) => format!("CAST(strftime('%d', {}) AS TEXT)", column), + + ("hour", Backend::PostgreSQL) => format!("EXTRACT(HOUR FROM {})", column), + ("hour", Backend::MySQL) => format!("HOUR({})", column), + ("hour", Backend::SQLite) => format!("CAST(strftime('%H', {}) AS TEXT)", column), + + ("minute", Backend::PostgreSQL) => format!("EXTRACT(MINUTE FROM {})", column), + ("minute", Backend::MySQL) => format!("MINUTE({})", column), + ("minute", Backend::SQLite) => format!("CAST(strftime('%M', {}) AS TEXT)", column), + + ("second", 
Backend::PostgreSQL) => format!("EXTRACT(SECOND FROM {})", column),
+        ("second", Backend::MySQL) => format!("SECOND({})", column),
+        ("second", Backend::SQLite) => format!("CAST(strftime('%S', {}) AS TEXT)", column),
+
+        ("week", Backend::PostgreSQL) => format!("EXTRACT(WEEK FROM {})", column),
+        ("week", Backend::MySQL) => format!("WEEK({})", column),
+        ("week", Backend::SQLite) => format!("CAST(strftime('%W', {}) AS TEXT)", column),
+
+        ("dow", Backend::PostgreSQL) => format!("EXTRACT(DOW FROM {})", column),
+        ("dow", Backend::MySQL) => format!("DAYOFWEEK({})", column),
+        ("dow", Backend::SQLite) => format!("CAST(strftime('%w', {}) AS TEXT)", column),
+
+        ("quarter", Backend::PostgreSQL) => format!("EXTRACT(QUARTER FROM {})", column),
+        ("quarter", Backend::MySQL) => format!("QUARTER({})", column),
+        ("quarter", Backend::SQLite) => format!(
+            "CAST((CAST(strftime('%m', {}) AS INTEGER) + 2) / 3 AS TEXT)",
+            column
+        ),
+
+        ("time", Backend::PostgreSQL) => format!("CAST({} AS TIME)", column),
+        ("time", Backend::MySQL) => format!("TIME({})", column),
+        ("time", Backend::SQLite) => format!("time({})", column),
+
+        ("iso_week", Backend::PostgreSQL) => format!("EXTRACT(WEEK FROM {})", column),
+        ("iso_week", Backend::MySQL) => format!(
+            "WEEK({}, 1) - WEEK(DATE_SUB({}, INTERVAL (DAYOFWEEK({}) - 1) DAY), 0) + 1",
+            column, column, column
+        ),
+        ("iso_week", Backend::SQLite) => format!("CAST(strftime('%W', {}) AS TEXT)", column),
+
+        ("iso_dow", Backend::PostgreSQL) => format!("EXTRACT(ISODOW FROM {})", column),
+        ("iso_dow", Backend::MySQL) => format!("((DAYOFWEEK({}) + 5) % 7) + 1", column),
+        ("iso_dow", Backend::SQLite) => format!("CAST(strftime('%w', {}) AS TEXT)", column),
+
+        _ => return None,
+    };
+    Some(sql)
+}
+
+/// `field__date` → `DATE(field)` (backend-aware) - implicit equality
+pub fn date_transform(ctx: &LookupContext) -> String {
+    match ctx.backend {
+        Backend::PostgreSQL => format!("DATE({}) = ?", ctx.column),
+        Backend::MySQL => format!("DATE({}) = ?", 
ctx.column), + Backend::SQLite => format!("date({}) = CAST(? AS TEXT)", ctx.column), + } +} + +/// `field__year` → `EXTRACT(YEAR FROM field)` or `YEAR(field)` (backend-aware) - implicit equality +pub fn year_transform(ctx: &LookupContext) -> String { + match ctx.backend { + Backend::PostgreSQL => format!("EXTRACT(YEAR FROM {}) = ?", ctx.column), + Backend::MySQL => format!("YEAR({}) = ?", ctx.column), + Backend::SQLite => format!("CAST(strftime('%Y', {}) AS INTEGER) = ?", ctx.column), + } +} + +/// `field__month` → `EXTRACT(MONTH FROM field)` or `MONTH(field)` (backend-aware) - implicit equality +pub fn month_transform(ctx: &LookupContext) -> String { + match ctx.backend { + Backend::PostgreSQL => format!("EXTRACT(MONTH FROM {}) = ?", ctx.column), + Backend::MySQL => format!("MONTH({}) = ?", ctx.column), + Backend::SQLite => format!("CAST(strftime('%m', {}) AS INTEGER) = ?", ctx.column), + } +} + +/// `field__day` → `EXTRACT(DAY FROM field)` or `DAY(field)` (backend-aware) - implicit equality +pub fn day_transform(ctx: &LookupContext) -> String { + match ctx.backend { + Backend::PostgreSQL => format!("EXTRACT(DAY FROM {}) = ?", ctx.column), + Backend::MySQL => format!("DAYOFMONTH({}) = ?", ctx.column), + Backend::SQLite => format!("CAST(strftime('%d', {}) AS INTEGER) = ?", ctx.column), + } +} + +/// `field__hour` → `EXTRACT(HOUR FROM field)` or `HOUR(field)` (backend-aware) - implicit equality +pub fn hour_transform(ctx: &LookupContext) -> String { + match ctx.backend { + Backend::PostgreSQL => format!("EXTRACT(HOUR FROM {}) = ?", ctx.column), + Backend::MySQL => format!("HOUR({}) = ?", ctx.column), + Backend::SQLite => format!("CAST(strftime('%H', {}) AS INTEGER) = ?", ctx.column), + } +} + +/// `field__minute` → `EXTRACT(MINUTE FROM field)` or `MINUTE(field)` (backend-aware) - implicit equality +pub fn minute_transform(ctx: &LookupContext) -> String { + match ctx.backend { + Backend::PostgreSQL => format!("EXTRACT(MINUTE FROM {}) = ?", ctx.column), + 
Backend::MySQL => format!("MINUTE({}) = ?", ctx.column), + Backend::SQLite => format!("CAST(strftime('%M', {}) AS INTEGER) = ?", ctx.column), + } +} + +/// `field__second` → `EXTRACT(SECOND FROM field)` or `SECOND(field)` (backend-aware) - implicit equality +pub fn second_transform(ctx: &LookupContext) -> String { + match ctx.backend { + Backend::PostgreSQL => format!("EXTRACT(SECOND FROM {}) = ?", ctx.column), + Backend::MySQL => format!("SECOND({}) = ?", ctx.column), + Backend::SQLite => format!("CAST(strftime('%S', {}) AS INTEGER) = ?", ctx.column), + } +} + +/// `field__week` → `EXTRACT(WEEK FROM field)` or `WEEK(field)` (backend-aware) - implicit equality +pub fn week_transform(ctx: &LookupContext) -> String { + match ctx.backend { + Backend::PostgreSQL => format!("EXTRACT(WEEK FROM {}) = ?", ctx.column), + Backend::MySQL => format!("WEEK({}) = ?", ctx.column), + Backend::SQLite => format!("CAST(strftime('%W', {}) AS INTEGER) = ?", ctx.column), + } +} + +/// `field__dow` → `EXTRACT(DOW FROM field)` or `DAYOFWEEK(field)` (backend-aware) - implicit equality +pub fn dow_transform(ctx: &LookupContext) -> String { + match ctx.backend { + Backend::PostgreSQL => format!("EXTRACT(DOW FROM {}) = ?", ctx.column), + Backend::MySQL => format!("DAYOFWEEK({}) = ?", ctx.column), + Backend::SQLite => format!("CAST(strftime('%w', {}) AS INTEGER) = ?", ctx.column), + } +} + +/// `field__quarter` → `EXTRACT(QUARTER FROM field)` or `QUARTER(field)` (backend-aware) - implicit equality +pub fn quarter_transform(ctx: &LookupContext) -> String { + match ctx.backend { + Backend::PostgreSQL => format!("EXTRACT(QUARTER FROM {}) = ?", ctx.column), + Backend::MySQL => format!("QUARTER({}) = ?", ctx.column), + Backend::SQLite => format!( + "((CAST(strftime('%m', {}) AS INTEGER) + 2) / 3) = ?", + ctx.column + ), + } +} + +/// `field__time` → `TIME(field)` or equivalent (backend-aware) - implicit equality +pub fn time_transform(ctx: &LookupContext) -> String { + match ctx.backend { + 
Backend::PostgreSQL => format!("CAST({} AS TIME) = ?", ctx.column), + Backend::MySQL => format!("TIME({}) = ?", ctx.column), + Backend::SQLite => format!("time({}) = ?", ctx.column), + } +} + +/// `field__iso_week` → `EXTRACT(WEEK FROM field)` or equivalent (backend-aware) - implicit equality +pub fn iso_week_transform(ctx: &LookupContext) -> String { + match ctx.backend { + Backend::PostgreSQL => format!("EXTRACT(WEEK FROM {}) = ?", ctx.column), + Backend::MySQL => format!( + "WEEK({}, 1) - WEEK(DATE_SUB({}, INTERVAL (DAYOFWEEK({}) - 1) DAY), 0) + 1 = ?", + ctx.column, ctx.column, ctx.column + ), + Backend::SQLite => format!("CAST(strftime('%W', {}) AS INTEGER) = ?", ctx.column), + } +} + +/// `field__iso_dow` → `EXTRACT(ISODOW FROM field)` or equivalent (backend-aware) - implicit equality +pub fn iso_dow_transform(ctx: &LookupContext) -> String { + match ctx.backend { + Backend::PostgreSQL => format!("EXTRACT(ISODOW FROM {}) = ?", ctx.column), + Backend::MySQL => format!("((DAYOFWEEK({}) + 5) % 7) + 1 = ?", ctx.column), + Backend::SQLite => format!("CAST(strftime('%w', {}) AS INTEGER) = ?", ctx.column), + } +} diff --git a/src/query/lookups/json_lookups.rs b/src/query/lookups/json_lookups.rs new file mode 100644 index 0000000..beb7401 --- /dev/null +++ b/src/query/lookups/json_lookups.rs @@ -0,0 +1,129 @@ +// +// ### +// Ryx — JSON Lookups +// ### +// +// Contains JSON transforms and lookups (key, has_key, contains, etc.) +// These are used for chained lookups like `metadata__key__priority__exact="high"` +// ### + +use crate::pool::Backend; +use crate::query::lookups::LookupContext; + +pub use crate::query::lookups::LookupFn; + +/// Apply a JSON field transformation. 
+/// Returns SQL like `(col->>'key')` or `JSON_UNQUOTE(JSON_EXTRACT(col, '$.key'))` +pub fn apply_json_transform( + name: &str, + column: &str, + backend: Backend, + key: Option<&str>, +) -> Option { + let sql = match (name, backend) { + ("key", Backend::PostgreSQL) => { + let k = key.unwrap_or("key"); + format!("({}->>'{}')", column, k) + } + ("key", Backend::MySQL) => { + let k = key.unwrap_or("key"); + format!("JSON_UNQUOTE(JSON_EXTRACT({}, '$.{}'))", column, k) + } + ("key", Backend::SQLite) => { + let k = key.unwrap_or("key"); + format!("json_extract({}, '$.{}')", column, k) + } + + ("key_text", Backend::PostgreSQL) => { + let k = key.unwrap_or("key"); + format!("({}->>'{}')::text", column, k) + } + ("key_text", Backend::MySQL) => { + let k = key.unwrap_or("key"); + format!( + "CAST(JSON_UNQUOTE(JSON_EXTRACT({}, '$.{}')) AS CHAR)", + column, k + ) + } + ("key_text", Backend::SQLite) => { + let k = key.unwrap_or("key"); + format!("CAST(json_extract({}, '$.{}') AS TEXT)", column, k) + } + + ("json", Backend::PostgreSQL) => format!("({}::jsonb)", column), + ("json", Backend::MySQL) => column.to_string(), + ("json", Backend::SQLite) => column.to_string(), + + _ => return None, + }; + Some(sql) +} + +/// `field__key` → `(field->>'key')` or `JSON_UNQUOTE(JSON_EXTRACT(field, '$.key'))` +pub fn json_key_transform(ctx: &LookupContext) -> String { + match ctx.backend { + Backend::PostgreSQL => format!("({}->>'key')", ctx.column), + Backend::MySQL => format!("JSON_UNQUOTE(JSON_EXTRACT({}, '$.key'))", ctx.column), + Backend::SQLite => format!("json_extract({}, '$.key')", ctx.column), + } +} + +/// `field__key_text` → `(field->>'key')::text` (for text comparisons like icontains) +pub fn json_key_text_transform(ctx: &LookupContext) -> String { + match ctx.backend { + Backend::PostgreSQL => format!("({}->>'key')::text", ctx.column), + Backend::MySQL => format!( + "CAST(JSON_UNQUOTE(JSON_EXTRACT({}, '$.key')) AS CHAR)", + ctx.column + ), + Backend::SQLite => 
format!("CAST(json_extract({}, '$.key') AS TEXT)", ctx.column), + } +} + +/// `field__json` → `field::jsonb` (PostgreSQL) or just field (MySQL/SQLite) +pub fn json_cast_transform(ctx: &LookupContext) -> String { + match ctx.backend { + Backend::PostgreSQL => format!("({}::jsonb)", ctx.column), + Backend::MySQL => ctx.column.clone(), + Backend::SQLite => ctx.column.clone(), + } +} + +/// `field__has_key="key"` → `field ? 'key'` (PostgreSQL) or `JSON_CONTAINS(field, '"key"')` (MySQL) +pub fn json_has_key(ctx: &LookupContext) -> String { + match ctx.backend { + Backend::PostgreSQL => format!("({} ? 'key')", ctx.column), + Backend::MySQL => format!("JSON_CONTAINS({}, '\"key\"')", ctx.column), + Backend::SQLite => format!("json_extract({}, '$.key') IS NOT NULL", ctx.column), + } +} + +/// `field__has_keys=['key1', 'key2']` → `field ?& array['key1', 'key2']` +pub fn json_has_keys(ctx: &LookupContext) -> String { + match ctx.backend { + Backend::PostgreSQL => format!("({} ?& array['key1', 'key2'])", ctx.column), + Backend::MySQL => format!("JSON_CONTAINS({}, '[\"key1\", \"key2\"]')", ctx.column), + Backend::SQLite => format!( + "json_extract({}, '$.key1') IS NOT NULL AND json_extract({}, '$.key2') IS NOT NULL", + ctx.column, ctx.column + ), + } +} + +/// `field__contains={"key": "value"}` → `field @> ?` (PostgreSQL) +pub fn json_contains(ctx: &LookupContext) -> String { + match ctx.backend { + Backend::PostgreSQL => format!("({} @> ?)", ctx.column), + Backend::MySQL => format!("JSON_CONTAINS({}, ?)", ctx.column), + Backend::SQLite => ctx.column.clone(), // Limited support in SQLite + } +} + +/// `field__contained_by={"key": "value"}` → `field <@ ?` (PostgreSQL) +pub fn json_contained_by(ctx: &LookupContext) -> String { + match ctx.backend { + Backend::PostgreSQL => format!("({} <@ ?)", ctx.column), + Backend::MySQL => format!("JSON_CONTAINS(?, {})", ctx.column), + Backend::SQLite => ctx.column.clone(), // Limited support in SQLite + } +} diff --git 
a/src/query/lookups/lookups.rs b/src/query/lookups/lookups.rs new file mode 100644 index 0000000..394b7d8 --- /dev/null +++ b/src/query/lookups/lookups.rs @@ -0,0 +1,336 @@ +// +// ### +// Ryx — Lookups Implementation +// ### +// +// Contains core types, registry, and resolve logic for the lookup system. +// This is the main implementation file - mod.rs just re-exports from here. +// ### + +use std::collections::HashMap; +use std::sync::{OnceLock, RwLock}; + +use crate::errors::{RyxError, RyxResult}; +use crate::pool::Backend; + +// Re-export submodules +pub use crate::query::lookups::common_lookups; +pub use crate::query::lookups::date_lookups; +pub use crate::query::lookups::json_lookups; + +// ### +// Core types +// ### + +#[derive(Debug, Clone)] +pub struct LookupContext { + pub column: String, + pub negated: bool, + pub backend: Backend, + pub json_key: Option, +} + +pub type LookupFn = fn(&LookupContext) -> String; + +#[derive(Debug, Clone)] +pub struct PythonLookup { + pub sql_template: String, +} + +// ### +// Global lookup registry +// ### + +struct LookupRegistry { + builtin: HashMap<&'static str, LookupFn>, + custom: HashMap, +} + +static REGISTRY: OnceLock> = OnceLock::new(); + +pub fn init_registry() { + REGISTRY.get_or_init(|| { + let mut builtin = HashMap::new(); + + builtin.insert("exact", common_lookups::exact as LookupFn); + builtin.insert("gt", common_lookups::gt as LookupFn); + builtin.insert("gte", common_lookups::gte as LookupFn); + builtin.insert("lt", common_lookups::lt as LookupFn); + builtin.insert("lte", common_lookups::lte as LookupFn); + + builtin.insert("contains", common_lookups::contains as LookupFn); + builtin.insert("icontains", common_lookups::icontains as LookupFn); + builtin.insert("startswith", common_lookups::startswith as LookupFn); + builtin.insert("istartswith", common_lookups::istartswith as LookupFn); + builtin.insert("endswith", common_lookups::endswith as LookupFn); + builtin.insert("iendswith", 
common_lookups::iendswith as LookupFn); + + builtin.insert("isnull", common_lookups::isnull as LookupFn); + builtin.insert("in", common_lookups::in_lookup as LookupFn); + builtin.insert("range", common_lookups::range as LookupFn); + + builtin.insert("date", date_lookups::date_transform as LookupFn); + builtin.insert("year", date_lookups::year_transform as LookupFn); + builtin.insert("month", date_lookups::month_transform as LookupFn); + builtin.insert("day", date_lookups::day_transform as LookupFn); + builtin.insert("hour", date_lookups::hour_transform as LookupFn); + builtin.insert("minute", date_lookups::minute_transform as LookupFn); + builtin.insert("second", date_lookups::second_transform as LookupFn); + builtin.insert("week", date_lookups::week_transform as LookupFn); + builtin.insert("dow", date_lookups::dow_transform as LookupFn); + builtin.insert("quarter", date_lookups::quarter_transform as LookupFn); + builtin.insert("time", date_lookups::time_transform as LookupFn); + builtin.insert("iso_week", date_lookups::iso_week_transform as LookupFn); + builtin.insert("iso_dow", date_lookups::iso_dow_transform as LookupFn); + + builtin.insert("key", json_lookups::json_key_transform as LookupFn); + builtin.insert( + "key_text", + json_lookups::json_key_text_transform as LookupFn, + ); + builtin.insert("json", json_lookups::json_cast_transform as LookupFn); + + builtin.insert("has_key", json_lookups::json_has_key as LookupFn); + builtin.insert("has_keys", json_lookups::json_has_keys as LookupFn); + builtin.insert("json_contains", json_lookups::json_contains as LookupFn); + builtin.insert("contained_by", json_lookups::json_contained_by as LookupFn); + + RwLock::new(LookupRegistry { + builtin, + custom: HashMap::new(), + }) + }); +} + +// ### +// Registry public API +// ### + +pub fn register_custom(name: impl Into, sql_template: impl Into) -> RyxResult<()> { + let registry = REGISTRY + .get() + .ok_or_else(|| RyxError::Internal("Lookup registry not initialized".into()))?; 
+ + let mut guard = registry + .write() + .map_err(|e| RyxError::Internal(format!("Registry lock poisoned: {e}")))?; + + guard.custom.insert( + name.into(), + PythonLookup { + sql_template: sql_template.into(), + }, + ); + + Ok(()) +} + +fn resolve_simple(field: &str, lookup_name: &str, ctx: &LookupContext) -> RyxResult { + let registry = REGISTRY + .get() + .ok_or_else(|| RyxError::Internal("Lookup registry not initialized".into()))?; + + let guard = registry + .read() + .map_err(|e| RyxError::Internal(format!("Registry lock poisoned: {e}")))?; + + if let Some(custom) = guard.custom.get(lookup_name) { + return Ok(custom.sql_template.replace("{col}", &ctx.column)); + } + + if let Some(lookup_fn) = guard.builtin.get(lookup_name) { + return Ok(lookup_fn(ctx)); + } + + Err(RyxError::UnknownLookup { + field: field.to_string(), + lookup: lookup_name.to_string(), + }) +} + +pub fn registered_lookups() -> RyxResult> { + let registry = REGISTRY + .get() + .ok_or_else(|| RyxError::Internal("Lookup registry not initialized".into()))?; + + let guard = registry + .read() + .map_err(|e| RyxError::Internal(format!("Registry lock poisoned: {e}")))?; + + let mut names: Vec = guard + .builtin + .keys() + .copied() + .map(|k| k.to_string()) + .chain(guard.custom.keys().cloned()) + .collect(); + names.sort(); + Ok(names) +} + +// ### +// Chained lookups support +// ### + +#[allow(dead_code)] +fn handle_sqlite_transform_lookup( + field: &str, + _transform: &str, + lookup_name: &str, + ctx: &LookupContext, +) -> RyxResult { + let is_numeric_comparison = matches!(lookup_name, "gt" | "gte" | "lt" | "lte" | "exact"); + + if is_numeric_comparison && ctx.column.contains("AS TEXT)") { + let transformed = ctx.column.replace("AS TEXT)", "AS INTEGER)"); + let new_ctx = LookupContext { + column: transformed, + negated: ctx.negated, + backend: ctx.backend, + json_key: ctx.json_key.clone(), + }; + return resolve_simple(field, lookup_name, &new_ctx); + } + + resolve_simple(field, lookup_name, ctx) 
+} + +fn add_sqlite_cast_for_transform(fragment: &str, lookup: &str) -> String { + match lookup { + "exact" => fragment.replace("= ?", "= CAST(? AS TEXT)"), + "gt" => fragment.replace("> ?", "> CAST(? AS TEXT)"), + "gte" => fragment.replace(">= ?", ">= CAST(? AS TEXT)"), + "lt" => fragment.replace("< ?", "< CAST(? AS TEXT)"), + "lte" => fragment.replace("<= ?", "<= CAST(? AS TEXT)"), + _ => fragment.to_string(), + } +} + +pub fn resolve(field: &str, lookup_name: &str, ctx: &LookupContext) -> RyxResult { + if !lookup_name.contains("__") { + if ctx.json_key.is_some() { + let mut column = format!("\"{}\"", field); + column = apply_transform("key", &column, ctx.backend, ctx.json_key.as_deref())?; + + let json_ctx = LookupContext { + column: column.clone(), + negated: ctx.negated, + backend: ctx.backend, + json_key: None, + }; + return resolve_simple(field, lookup_name, &json_ctx); + } + + if ctx.column.contains("strftime") || ctx.column.contains("DATE(") { + if ctx.column.contains("strftime('%Y'") { + return handle_sqlite_transform_lookup(field, "year", lookup_name, ctx); + } else if ctx.column.contains("strftime('%m'") { + return handle_sqlite_transform_lookup(field, "month", lookup_name, ctx); + } else if ctx.column.contains("strftime('%d'") { + return handle_sqlite_transform_lookup(field, "day", lookup_name, ctx); + } else if ctx.column.contains("strftime('%H'") { + return handle_sqlite_transform_lookup(field, "hour", lookup_name, ctx); + } + if ctx.column.starts_with("DATE(") { + return resolve_simple(field, lookup_name, ctx); + } + } + return resolve_simple(field, lookup_name, ctx); + } + + let parts: Vec<&str> = lookup_name.split("__").collect(); + let final_lookup = *parts.last().unwrap(); + let transform_parts: Vec<&str> = parts[..parts.len() - 1].to_vec(); + + let mut column = format!("\"{}\"", field); + + for transform in transform_parts.iter() { + let is_transform = matches!( + *transform, + "date" + | "year" + | "month" + | "day" + | "hour" + | "minute" + | 
"second" + | "week" + | "dow" + | "quarter" + | "time" + | "iso_week" + | "iso_dow" + | "key" + | "key_text" + | "json" + ); + + if is_transform { + let key = if matches!(*transform, "key" | "key_text") { + ctx.json_key + .as_deref() + .or_else(|| field.rsplit("__").next()) + } else { + None + }; + column = apply_transform(transform, &column, ctx.backend, key)?; + } else { + break; + } + } + + let final_ctx = LookupContext { + column: column.clone(), + negated: ctx.negated, + backend: ctx.backend, + json_key: ctx.json_key.clone(), + }; + + if ctx.backend == Backend::SQLite { + let col_has_transform = column.contains("strftime"); + + if col_has_transform && !column.contains("AS INTEGER") { + let is_numeric_comparison = + matches!(final_lookup, "gt" | "gte" | "lt" | "lte" | "exact"); + + if is_numeric_comparison { + let transformed = column.replace("AS TEXT)", "AS INTEGER)"); + let final_ctx_int = LookupContext { + column: transformed, + negated: ctx.negated, + backend: ctx.backend, + json_key: ctx.json_key.clone(), + }; + return resolve_simple(field, final_lookup, &final_ctx_int); + } + + let fragment = resolve_simple(field, final_lookup, &final_ctx)?; + return Ok(add_sqlite_cast_for_transform(&fragment, final_lookup)); + } + } + + resolve_simple(field, final_lookup, &final_ctx) +} + +pub fn apply_transform( + name: &str, + column: &str, + backend: Backend, + key: Option<&str>, +) -> RyxResult { + if let Some(sql) = date_lookups::apply_date_transform(name, column, backend) { + return Ok(sql); + } + if let Some(sql) = json_lookups::apply_json_transform(name, column, backend, key) { + return Ok(sql); + } + + if name == "date" { + return Ok(format!("DATE({})", column)); + } + + Err(RyxError::UnknownLookup { + field: column.to_string(), + lookup: name.to_string(), + }) +} diff --git a/src/query/lookups/mod.rs b/src/query/lookups/mod.rs new file mode 100644 index 0000000..b3bd303 --- /dev/null +++ b/src/query/lookups/mod.rs @@ -0,0 +1,34 @@ +// +// ### +// Ryx — Lookup 
Module +// ### +// +// This module provides the lookup system - the suffix after `__` in filter expressions. +// Examples: +// `age__gte=25` → lookup = "gte", SQL = "age >= $1" +// `name__icontains="bob"` → lookup = "icontains", SQL = "LOWER(name) LIKE LOWER($1)" +// +// The module is organized as: +// - mod.rs : Re-exports from lookups.rs +// - lookups.rs : Core types, registry, resolve() logic +// - common_lookups.rs: Comparison and string lookups (exact, gt, contains, etc.) +// - date_lookups.rs : Date/time transforms (year, month, day, etc.) +// - json_lookups.rs : JSON transforms and lookups (key, has_key, etc.) +// ### + +pub mod common_lookups; +pub mod date_lookups; +pub mod json_lookups; +pub mod lookups; + +// Re-export main types from lookups.rs +pub use lookups::LookupContext; +pub use lookups::LookupFn; +pub use lookups::PythonLookup; + +// Re-export functions from lookups.rs +pub use lookups::apply_transform; +pub use lookups::init_registry; +pub use lookups::register_custom; +pub use lookups::registered_lookups; +pub use lookups::resolve; diff --git a/src/query/mod.rs b/src/query/mod.rs index 8303e94..60df9c9 100644 --- a/src/query/mod.rs +++ b/src/query/mod.rs @@ -10,4 +10,4 @@ pub mod ast; pub mod compiler; -pub mod lookup; +pub mod lookups; diff --git a/uv.lock b/uv.lock index c23ada9..e57fa05 100644 --- a/uv.lock +++ b/uv.lock @@ -7,15 +7,6 @@ resolution-markers = [ "python_full_version < '3.11'", ] -[[package]] -name = "aiosqlite" -version = "0.22.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/4e/8a/64761f4005f17809769d23e518d915db74e6310474e733e3593cfc854ef1/aiosqlite-0.22.1.tar.gz", hash = "sha256:043e0bd78d32888c0a9ca90fc788b38796843360c855a7262a532813133a0650", size = 14821, upload-time = "2025-12-23T19:25:43.997Z" } -wheels = [ - { url = 
"https://files.pythonhosted.org/packages/00/b7/e3bf5133d697a08128598c8d0abc5e16377b51465a33756de24fa7dee953/aiosqlite-0.22.1-py3-none-any.whl", hash = "sha256:21c002eb13823fad740196c5a2e9d8e62f6243bd9e7e4a1f87fb5e44ecb4fceb", size = 17405, upload-time = "2025-12-23T19:25:42.139Z" }, -] - [[package]] name = "asttokens" version = "3.0.1" @@ -73,66 +64,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/c1/ea/53f2148663b321f21b5a606bd5f191517cf40b7072c0497d3c92c4a13b1e/executing-2.2.1-py2.py3-none-any.whl", hash = "sha256:760643d3452b4d777d295bb167ccc74c64a81df23fb5e08eff250c425a4b2017", size = 28317, upload-time = "2025-09-01T09:48:08.5Z" }, ] -[[package]] -name = "greenlet" -version = "3.3.2" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/a3/51/1664f6b78fc6ebbd98019a1fd730e83fa78f2db7058f72b1463d3612b8db/greenlet-3.3.2.tar.gz", hash = "sha256:2eaf067fc6d886931c7962e8c6bede15d2f01965560f3359b27c80bde2d151f2", size = 188267, upload-time = "2026-02-20T20:54:15.531Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/38/3f/9859f655d11901e7b2996c6e3d33e0caa9a1d4572c3bc61ed0faa64b2f4c/greenlet-3.3.2-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:9bc885b89709d901859cf95179ec9f6bb67a3d2bb1f0e88456461bd4b7f8fd0d", size = 277747, upload-time = "2026-02-20T20:16:21.325Z" }, - { url = "https://files.pythonhosted.org/packages/fb/07/cb284a8b5c6498dbd7cba35d31380bb123d7dceaa7907f606c8ff5993cbf/greenlet-3.3.2-cp310-cp310-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b568183cf65b94919be4438dc28416b234b678c608cafac8874dfeeb2a9bbe13", size = 579202, upload-time = "2026-02-20T20:47:28.955Z" }, - { url = "https://files.pythonhosted.org/packages/ed/45/67922992b3a152f726163b19f890a85129a992f39607a2a53155de3448b8/greenlet-3.3.2-cp310-cp310-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:527fec58dc9f90efd594b9b700662ed3fb2493c2122067ac9c740d98080a620e", 
size = 590620, upload-time = "2026-02-20T20:55:55.581Z" }, - { url = "https://files.pythonhosted.org/packages/03/5f/6e2a7d80c353587751ef3d44bb947f0565ec008a2e0927821c007e96d3a7/greenlet-3.3.2-cp310-cp310-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:508c7f01f1791fbc8e011bd508f6794cb95397fdb198a46cb6635eb5b78d85a7", size = 602132, upload-time = "2026-02-20T21:02:43.261Z" }, - { url = "https://files.pythonhosted.org/packages/ad/55/9f1ebb5a825215fadcc0f7d5073f6e79e3007e3282b14b22d6aba7ca6cb8/greenlet-3.3.2-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ad0c8917dd42a819fe77e6bdfcb84e3379c0de956469301d9fd36427a1ca501f", size = 591729, upload-time = "2026-02-20T20:20:58.395Z" }, - { url = "https://files.pythonhosted.org/packages/24/b4/21f5455773d37f94b866eb3cf5caed88d6cea6dd2c6e1f9c34f463cba3ec/greenlet-3.3.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:97245cc10e5515dbc8c3104b2928f7f02b6813002770cfaffaf9a6e0fc2b94ef", size = 1551946, upload-time = "2026-02-20T20:49:31.102Z" }, - { url = "https://files.pythonhosted.org/packages/00/68/91f061a926abead128fe1a87f0b453ccf07368666bd59ffa46016627a930/greenlet-3.3.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:8c1fdd7d1b309ff0da81d60a9688a8bd044ac4e18b250320a96fc68d31c209ca", size = 1618494, upload-time = "2026-02-20T20:21:06.541Z" }, - { url = "https://files.pythonhosted.org/packages/ac/78/f93e840cbaef8becaf6adafbaf1319682a6c2d8c1c20224267a5c6c8c891/greenlet-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:5d0e35379f93a6d0222de929a25ab47b5eb35b5ef4721c2b9cbcc4036129ff1f", size = 230092, upload-time = "2026-02-20T20:17:09.379Z" }, - { url = "https://files.pythonhosted.org/packages/f3/47/16400cb42d18d7a6bb46f0626852c1718612e35dcb0dffa16bbaffdf5dd2/greenlet-3.3.2-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:c56692189a7d1c7606cb794be0a8381470d95c57ce5be03fb3d0ef57c7853b86", size = 278890, upload-time = "2026-02-20T20:19:39.263Z" }, - { url = 
"https://files.pythonhosted.org/packages/a3/90/42762b77a5b6aa96cd8c0e80612663d39211e8ae8a6cd47c7f1249a66262/greenlet-3.3.2-cp311-cp311-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1ebd458fa8285960f382841da585e02201b53a5ec2bac6b156fc623b5ce4499f", size = 581120, upload-time = "2026-02-20T20:47:30.161Z" }, - { url = "https://files.pythonhosted.org/packages/bf/6f/f3d64f4fa0a9c7b5c5b3c810ff1df614540d5aa7d519261b53fba55d4df9/greenlet-3.3.2-cp311-cp311-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a443358b33c4ec7b05b79a7c8b466f5d275025e750298be7340f8fc63dff2a55", size = 594363, upload-time = "2026-02-20T20:55:56.965Z" }, - { url = "https://files.pythonhosted.org/packages/9c/8b/1430a04657735a3f23116c2e0d5eb10220928846e4537a938a41b350bed6/greenlet-3.3.2-cp311-cp311-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:4375a58e49522698d3e70cc0b801c19433021b5c37686f7ce9c65b0d5c8677d2", size = 605046, upload-time = "2026-02-20T21:02:45.234Z" }, - { url = "https://files.pythonhosted.org/packages/72/83/3e06a52aca8128bdd4dcd67e932b809e76a96ab8c232a8b025b2850264c5/greenlet-3.3.2-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8e2cd90d413acbf5e77ae41e5d3c9b3ac1d011a756d7284d7f3f2b806bbd6358", size = 594156, upload-time = "2026-02-20T20:20:59.955Z" }, - { url = "https://files.pythonhosted.org/packages/70/79/0de5e62b873e08fe3cef7dbe84e5c4bc0e8ed0c7ff131bccb8405cd107c8/greenlet-3.3.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:442b6057453c8cb29b4fb36a2ac689382fc71112273726e2423f7f17dc73bf99", size = 1554649, upload-time = "2026-02-20T20:49:32.293Z" }, - { url = "https://files.pythonhosted.org/packages/5a/00/32d30dee8389dc36d42170a9c66217757289e2afb0de59a3565260f38373/greenlet-3.3.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:45abe8eb6339518180d5a7fa47fa01945414d7cca5ecb745346fc6a87d2750be", size = 1619472, upload-time = "2026-02-20T20:21:07.966Z" }, - { url = 
"https://files.pythonhosted.org/packages/f1/3a/efb2cf697fbccdf75b24e2c18025e7dfa54c4f31fab75c51d0fe79942cef/greenlet-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:1e692b2dae4cc7077cbb11b47d258533b48c8fde69a33d0d8a82e2fe8d8531d5", size = 230389, upload-time = "2026-02-20T20:17:18.772Z" }, - { url = "https://files.pythonhosted.org/packages/e1/a1/65bbc059a43a7e2143ec4fc1f9e3f673e04f9c7b371a494a101422ac4fd5/greenlet-3.3.2-cp311-cp311-win_arm64.whl", hash = "sha256:02b0a8682aecd4d3c6c18edf52bc8e51eacdd75c8eac52a790a210b06aa295fd", size = 229645, upload-time = "2026-02-20T20:18:18.695Z" }, - { url = "https://files.pythonhosted.org/packages/ea/ab/1608e5a7578e62113506740b88066bf09888322a311cff602105e619bd87/greenlet-3.3.2-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:ac8d61d4343b799d1e526db579833d72f23759c71e07181c2d2944e429eb09cd", size = 280358, upload-time = "2026-02-20T20:17:43.971Z" }, - { url = "https://files.pythonhosted.org/packages/a5/23/0eae412a4ade4e6623ff7626e38998cb9b11e9ff1ebacaa021e4e108ec15/greenlet-3.3.2-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3ceec72030dae6ac0c8ed7591b96b70410a8be370b6a477b1dbc072856ad02bd", size = 601217, upload-time = "2026-02-20T20:47:31.462Z" }, - { url = "https://files.pythonhosted.org/packages/f8/16/5b1678a9c07098ecb9ab2dd159fafaf12e963293e61ee8d10ecb55273e5e/greenlet-3.3.2-cp312-cp312-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a2a5be83a45ce6188c045bcc44b0ee037d6a518978de9a5d97438548b953a1ac", size = 611792, upload-time = "2026-02-20T20:55:58.423Z" }, - { url = "https://files.pythonhosted.org/packages/5c/c5/cc09412a29e43406eba18d61c70baa936e299bc27e074e2be3806ed29098/greenlet-3.3.2-cp312-cp312-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:ae9e21c84035c490506c17002f5c8ab25f980205c3e61ddb3a2a2a2e6c411fcb", size = 626250, upload-time = "2026-02-20T21:02:46.596Z" }, - { url = 
"https://files.pythonhosted.org/packages/50/1f/5155f55bd71cabd03765a4aac9ac446be129895271f73872c36ebd4b04b6/greenlet-3.3.2-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:43e99d1749147ac21dde49b99c9abffcbc1e2d55c67501465ef0930d6e78e070", size = 613875, upload-time = "2026-02-20T20:21:01.102Z" }, - { url = "https://files.pythonhosted.org/packages/fc/dd/845f249c3fcd69e32df80cdab059b4be8b766ef5830a3d0aa9d6cad55beb/greenlet-3.3.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:4c956a19350e2c37f2c48b336a3afb4bff120b36076d9d7fb68cb44e05d95b79", size = 1571467, upload-time = "2026-02-20T20:49:33.495Z" }, - { url = "https://files.pythonhosted.org/packages/2a/50/2649fe21fcc2b56659a452868e695634722a6655ba245d9f77f5656010bf/greenlet-3.3.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:6c6f8ba97d17a1e7d664151284cb3315fc5f8353e75221ed4324f84eb162b395", size = 1640001, upload-time = "2026-02-20T20:21:09.154Z" }, - { url = "https://files.pythonhosted.org/packages/9b/40/cc802e067d02af8b60b6771cea7d57e21ef5e6659912814babb42b864713/greenlet-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:34308836d8370bddadb41f5a7ce96879b72e2fdfb4e87729330c6ab52376409f", size = 231081, upload-time = "2026-02-20T20:17:28.121Z" }, - { url = "https://files.pythonhosted.org/packages/58/2e/fe7f36ff1982d6b10a60d5e0740c759259a7d6d2e1dc41da6d96de32fff6/greenlet-3.3.2-cp312-cp312-win_arm64.whl", hash = "sha256:d3a62fa76a32b462a97198e4c9e99afb9ab375115e74e9a83ce180e7a496f643", size = 230331, upload-time = "2026-02-20T20:17:23.34Z" }, - { url = "https://files.pythonhosted.org/packages/ac/48/f8b875fa7dea7dd9b33245e37f065af59df6a25af2f9561efa8d822fde51/greenlet-3.3.2-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:aa6ac98bdfd716a749b84d4034486863fd81c3abde9aa3cf8eff9127981a4ae4", size = 279120, upload-time = "2026-02-20T20:19:01.9Z" }, - { url = 
"https://files.pythonhosted.org/packages/49/8d/9771d03e7a8b1ee456511961e1b97a6d77ae1dea4a34a5b98eee706689d3/greenlet-3.3.2-cp313-cp313-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ab0c7e7901a00bc0a7284907273dc165b32e0d109a6713babd04471327ff7986", size = 603238, upload-time = "2026-02-20T20:47:32.873Z" }, - { url = "https://files.pythonhosted.org/packages/59/0e/4223c2bbb63cd5c97f28ffb2a8aee71bdfb30b323c35d409450f51b91e3e/greenlet-3.3.2-cp313-cp313-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:d248d8c23c67d2291ffd47af766e2a3aa9fa1c6703155c099feb11f526c63a92", size = 614219, upload-time = "2026-02-20T20:55:59.817Z" }, - { url = "https://files.pythonhosted.org/packages/94/2b/4d012a69759ac9d77210b8bfb128bc621125f5b20fc398bce3940d036b1c/greenlet-3.3.2-cp313-cp313-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:ccd21bb86944ca9be6d967cf7691e658e43417782bce90b5d2faeda0ff78a7dd", size = 628268, upload-time = "2026-02-20T21:02:48.024Z" }, - { url = "https://files.pythonhosted.org/packages/7a/34/259b28ea7a2a0c904b11cd36c79b8cef8019b26ee5dbe24e73b469dea347/greenlet-3.3.2-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b6997d360a4e6a4e936c0f9625b1c20416b8a0ea18a8e19cabbefc712e7397ab", size = 616774, upload-time = "2026-02-20T20:21:02.454Z" }, - { url = "https://files.pythonhosted.org/packages/0a/03/996c2d1689d486a6e199cb0f1cf9e4aa940c500e01bdf201299d7d61fa69/greenlet-3.3.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:64970c33a50551c7c50491671265d8954046cb6e8e2999aacdd60e439b70418a", size = 1571277, upload-time = "2026-02-20T20:49:34.795Z" }, - { url = "https://files.pythonhosted.org/packages/d9/c4/2570fc07f34a39f2caf0bf9f24b0a1a0a47bc2e8e465b2c2424821389dfc/greenlet-3.3.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:1a9172f5bf6bd88e6ba5a84e0a68afeac9dc7b6b412b245dd64f52d83c81e55b", size = 1640455, upload-time = "2026-02-20T20:21:10.261Z" }, - { url = 
"https://files.pythonhosted.org/packages/91/39/5ef5aa23bc545aa0d31e1b9b55822b32c8da93ba657295840b6b34124009/greenlet-3.3.2-cp313-cp313-win_amd64.whl", hash = "sha256:a7945dd0eab63ded0a48e4dcade82939783c172290a7903ebde9e184333ca124", size = 230961, upload-time = "2026-02-20T20:16:58.461Z" }, - { url = "https://files.pythonhosted.org/packages/62/6b/a89f8456dcb06becff288f563618e9f20deed8dd29beea14f9a168aef64b/greenlet-3.3.2-cp313-cp313-win_arm64.whl", hash = "sha256:394ead29063ee3515b4e775216cb756b2e3b4a7e55ae8fd884f17fa579e6b327", size = 230221, upload-time = "2026-02-20T20:17:37.152Z" }, - { url = "https://files.pythonhosted.org/packages/3f/ae/8bffcbd373b57a5992cd077cbe8858fff39110480a9d50697091faea6f39/greenlet-3.3.2-cp314-cp314-macosx_11_0_universal2.whl", hash = "sha256:8d1658d7291f9859beed69a776c10822a0a799bc4bfe1bd4272bb60e62507dab", size = 279650, upload-time = "2026-02-20T20:18:00.783Z" }, - { url = "https://files.pythonhosted.org/packages/d1/c0/45f93f348fa49abf32ac8439938726c480bd96b2a3c6f4d949ec0124b69f/greenlet-3.3.2-cp314-cp314-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:18cb1b7337bca281915b3c5d5ae19f4e76d35e1df80f4ad3c1a7be91fadf1082", size = 650295, upload-time = "2026-02-20T20:47:34.036Z" }, - { url = "https://files.pythonhosted.org/packages/b3/de/dd7589b3f2b8372069ab3e4763ea5329940fc7ad9dcd3e272a37516d7c9b/greenlet-3.3.2-cp314-cp314-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c2e47408e8ce1c6f1ceea0dffcdf6ebb85cc09e55c7af407c99f1112016e45e9", size = 662163, upload-time = "2026-02-20T20:56:01.295Z" }, - { url = "https://files.pythonhosted.org/packages/cd/ac/85804f74f1ccea31ba518dcc8ee6f14c79f73fe36fa1beba38930806df09/greenlet-3.3.2-cp314-cp314-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:e3cb43ce200f59483eb82949bf1835a99cf43d7571e900d7c8d5c62cdf25d2f9", size = 675371, upload-time = "2026-02-20T21:02:49.664Z" }, - { url = 
"https://files.pythonhosted.org/packages/d2/d8/09bfa816572a4d83bccd6750df1926f79158b1c36c5f73786e26dbe4ee38/greenlet-3.3.2-cp314-cp314-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:63d10328839d1973e5ba35e98cccbca71b232b14051fd957b6f8b6e8e80d0506", size = 664160, upload-time = "2026-02-20T20:21:04.015Z" }, - { url = "https://files.pythonhosted.org/packages/48/cf/56832f0c8255d27f6c35d41b5ec91168d74ec721d85f01a12131eec6b93c/greenlet-3.3.2-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:8e4ab3cfb02993c8cc248ea73d7dae6cec0253e9afa311c9b37e603ca9fad2ce", size = 1619181, upload-time = "2026-02-20T20:49:36.052Z" }, - { url = "https://files.pythonhosted.org/packages/0a/23/b90b60a4aabb4cec0796e55f25ffbfb579a907c3898cd2905c8918acaa16/greenlet-3.3.2-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:94ad81f0fd3c0c0681a018a976e5c2bd2ca2d9d94895f23e7bb1af4e8af4e2d5", size = 1687713, upload-time = "2026-02-20T20:21:11.684Z" }, - { url = "https://files.pythonhosted.org/packages/f3/ca/2101ca3d9223a1dc125140dbc063644dca76df6ff356531eb27bc267b446/greenlet-3.3.2-cp314-cp314-win_amd64.whl", hash = "sha256:8c4dd0f3997cf2512f7601563cc90dfb8957c0cff1e3a1b23991d4ea1776c492", size = 232034, upload-time = "2026-02-20T20:20:08.186Z" }, - { url = "https://files.pythonhosted.org/packages/f6/4a/ecf894e962a59dea60f04877eea0fd5724618da89f1867b28ee8b91e811f/greenlet-3.3.2-cp314-cp314-win_arm64.whl", hash = "sha256:cd6f9e2bbd46321ba3bbb4c8a15794d32960e3b0ae2cc4d49a1a53d314805d71", size = 231437, upload-time = "2026-02-20T20:18:59.722Z" }, - { url = "https://files.pythonhosted.org/packages/98/6d/8f2ef704e614bcf58ed43cfb8d87afa1c285e98194ab2cfad351bf04f81e/greenlet-3.3.2-cp314-cp314t-macosx_11_0_universal2.whl", hash = "sha256:e26e72bec7ab387ac80caa7496e0f908ff954f31065b0ffc1f8ecb1338b11b54", size = 286617, upload-time = "2026-02-20T20:19:29.856Z" }, - { url = 
"https://files.pythonhosted.org/packages/5e/0d/93894161d307c6ea237a43988f27eba0947b360b99ac5239ad3fe09f0b47/greenlet-3.3.2-cp314-cp314t-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8b466dff7a4ffda6ca975979bab80bdadde979e29fc947ac3be4451428d8b0e4", size = 655189, upload-time = "2026-02-20T20:47:35.742Z" }, - { url = "https://files.pythonhosted.org/packages/f5/2c/d2d506ebd8abcb57386ec4f7ba20f4030cbe56eae541bc6fd6ef399c0b41/greenlet-3.3.2-cp314-cp314t-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:b8bddc5b73c9720bea487b3bffdb1840fe4e3656fba3bd40aa1489e9f37877ff", size = 658225, upload-time = "2026-02-20T20:56:02.527Z" }, - { url = "https://files.pythonhosted.org/packages/d1/67/8197b7e7e602150938049d8e7f30de1660cfb87e4c8ee349b42b67bdb2e1/greenlet-3.3.2-cp314-cp314t-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:59b3e2c40f6706b05a9cd299c836c6aa2378cabe25d021acd80f13abf81181cf", size = 666581, upload-time = "2026-02-20T21:02:51.526Z" }, - { url = "https://files.pythonhosted.org/packages/8e/30/3a09155fbf728673a1dea713572d2d31159f824a37c22da82127056c44e4/greenlet-3.3.2-cp314-cp314t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b26b0f4428b871a751968285a1ac9648944cea09807177ac639b030bddebcea4", size = 657907, upload-time = "2026-02-20T20:21:05.259Z" }, - { url = "https://files.pythonhosted.org/packages/f3/fd/d05a4b7acd0154ed758797f0a43b4c0962a843bedfe980115e842c5b2d08/greenlet-3.3.2-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:1fb39a11ee2e4d94be9a76671482be9398560955c9e568550de0224e41104727", size = 1618857, upload-time = "2026-02-20T20:49:37.309Z" }, - { url = "https://files.pythonhosted.org/packages/6f/e1/50ee92a5db521de8f35075b5eff060dd43d39ebd46c2181a2042f7070385/greenlet-3.3.2-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:20154044d9085151bc309e7689d6f7ba10027f8f5a8c0676ad398b951913d89e", size = 1680010, upload-time = "2026-02-20T20:21:13.427Z" }, - { url = 
"https://files.pythonhosted.org/packages/29/4b/45d90626aef8e65336bed690106d1382f7a43665e2249017e9527df8823b/greenlet-3.3.2-cp314-cp314t-win_amd64.whl", hash = "sha256:c04c5e06ec3e022cbfe2cd4a846e1d4e50087444f875ff6d2c2ad8445495cf1a", size = 237086, upload-time = "2026-02-20T20:20:45.786Z" }, -] - [[package]] name = "iniconfig" version = "2.3.0" @@ -388,11 +319,9 @@ wheels = [ [[package]] name = "ryx" -version = "0.1.0" +version = "0.1.2" source = { editable = "." } dependencies = [ - { name = "aiosqlite" }, - { name = "greenlet" }, { name = "ipython", version = "8.39.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" }, { name = "ipython", version = "9.10.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version == '3.11.*'" }, { name = "ipython", version = "9.12.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.12'" }, @@ -412,8 +341,6 @@ dev = [ [package.metadata] requires-dist = [ - { name = "aiosqlite", specifier = ">=0.22.1" }, - { name = "greenlet", specifier = ">=3.3.2" }, { name = "ipython", specifier = ">=8.0.0" }, { name = "pytest", marker = "extra == 'dev'", specifier = ">=8" }, { name = "pytest-asyncio", marker = "extra == 'dev'", specifier = ">=0.23" },