
Commit 22cb506

[logical-types] use Scalar in Expr::Literal (#12793)
* [logical-types] use Scalar in Expr::Literal
* Fix rust lint
* Fix CI
* Fix docs
1 parent 47941f0 commit 22cb506
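
At a glance, the change replaces the bare ScalarValue payload of Expr::Literal with a Scalar wrapper: construction sites switch from Expr::Literal(scalar_value) to Expr::from(scalar_value), and match sites either destructure the Scalar or bind it and call .value(). A minimal sketch of the resulting pattern (not part of the commit; it assumes only the APIs visible in the diffs below: Expr::from(ScalarValue), the Scalar { value, .. } struct pattern, and Scalar::value()):

// Hedged sketch (not part of this commit): the migration pattern, assuming the
// `Scalar` wrapper exposes a public `value` field as used throughout the diffs below.
use datafusion::logical_expr::{Expr, Scalar};
use datafusion::scalar::ScalarValue;

fn literal_as_str(expr: &Expr) -> Option<&str> {
    match expr {
        // Before this commit: Expr::Literal(ScalarValue::Utf8(Some(s)))
        // After: the variant holds a `Scalar`, so match on its `value` field.
        Expr::Literal(Scalar {
            value: ScalarValue::Utf8(Some(s)),
            ..
        }) => Some(s.as_str()),
        _ => None,
    }
}

fn int_literal(v: i32) -> Expr {
    // Construction goes through the `From<ScalarValue>` impl instead of
    // wrapping a ScalarValue directly in Expr::Literal.
    Expr::from(ScalarValue::Int32(Some(v)))
}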

91 files changed: +773 / -566 lines


datafusion-cli/src/functions.rs (+5, -2)
@@ -27,7 +27,7 @@ use datafusion::common::{plan_err, Column};
 use datafusion::datasource::function::TableFunctionImpl;
 use datafusion::datasource::TableProvider;
 use datafusion::error::Result;
-use datafusion::logical_expr::Expr;
+use datafusion::logical_expr::{Expr, Scalar};
 use datafusion::physical_plan::memory::MemoryExec;
 use datafusion::physical_plan::ExecutionPlan;
 use datafusion::scalar::ScalarValue;
@@ -321,7 +321,10 @@ pub struct ParquetMetadataFunc {}
 impl TableFunctionImpl for ParquetMetadataFunc {
     fn call(&self, exprs: &[Expr]) -> Result<Arc<dyn TableProvider>> {
         let filename = match exprs.first() {
-            Some(Expr::Literal(ScalarValue::Utf8(Some(s)))) => s, // single quote: parquet_metadata('x.parquet')
+            Some(Expr::Literal(Scalar {
+                value: ScalarValue::Utf8(Some(s)),
+                ..
+            })) => s, // single quote: parquet_metadata('x.parquet')
             Some(Expr::Column(Column { name, .. })) => name, // double quote: parquet_metadata("x.parquet")
             _ => {
                 return plan_err!(

datafusion-examples/examples/expr_api.rs (+1, -1)
@@ -61,7 +61,7 @@ async fn main() -> Result<()> {
     let expr2 = Expr::BinaryExpr(BinaryExpr::new(
         Box::new(col("a")),
         Operator::Plus,
-        Box::new(Expr::Literal(ScalarValue::Int32(Some(5)))),
+        Box::new(Expr::from(ScalarValue::Int32(Some(5)))),
     ));
     assert_eq!(expr, expr2);

datafusion-examples/examples/simple_udtf.rs (+11, -3)
@@ -30,7 +30,7 @@ use datafusion::physical_plan::ExecutionPlan;
 use datafusion::prelude::SessionContext;
 use datafusion_common::{plan_err, ScalarValue};
 use datafusion_expr::simplify::SimplifyContext;
-use datafusion_expr::{Expr, TableType};
+use datafusion_expr::{Expr, Scalar, TableType};
 use datafusion_optimizer::simplify_expressions::ExprSimplifier;
 use std::fs::File;
 use std::io::Seek;
@@ -133,7 +133,11 @@ struct LocalCsvTableFunc {}
 
 impl TableFunctionImpl for LocalCsvTableFunc {
     fn call(&self, exprs: &[Expr]) -> Result<Arc<dyn TableProvider>> {
-        let Some(Expr::Literal(ScalarValue::Utf8(Some(ref path)))) = exprs.first() else {
+        let Some(Expr::Literal(Scalar {
+            value: ScalarValue::Utf8(Some(ref path)),
+            ..
+        })) = exprs.first()
+        else {
             return plan_err!("read_csv requires at least one string argument");
         };
 
@@ -145,7 +149,11 @@ impl TableFunctionImpl for LocalCsvTableFunc {
         let info = SimplifyContext::new(&execution_props);
         let expr = ExprSimplifier::new(info).simplify(expr.clone())?;
 
-        if let Expr::Literal(ScalarValue::Int64(Some(limit))) = expr {
+        if let Expr::Literal(Scalar {
+            value: ScalarValue::Int64(Some(limit)),
+            ..
+        }) = expr
+        {
             Ok(limit as usize)
         } else {
             plan_err!("Limit must be an integer")

datafusion/core/benches/map_query_sql.rs (+2, -2)
@@ -71,8 +71,8 @@ fn criterion_benchmark(c: &mut Criterion) {
     let mut value_buffer = Vec::new();
 
     for i in 0..1000 {
-        key_buffer.push(Expr::Literal(ScalarValue::Utf8(Some(keys[i].clone()))));
-        value_buffer.push(Expr::Literal(ScalarValue::Int32(Some(values[i]))));
+        key_buffer.push(Expr::from(ScalarValue::Utf8(Some(keys[i].clone()))));
+        value_buffer.push(Expr::from(ScalarValue::Int32(Some(values[i]))));
     }
     c.bench_function("map_1000_1", |b| {
         b.iter(|| {

datafusion/core/src/dataframe/mod.rs (+2, -2)
@@ -1188,7 +1188,7 @@ impl DataFrame {
     /// ```
     pub async fn count(self) -> Result<usize> {
         let rows = self
-            .aggregate(vec![], vec![count(Expr::Literal(COUNT_STAR_EXPANSION))])?
+            .aggregate(vec![], vec![count(Expr::from(COUNT_STAR_EXPANSION))])?
             .collect()
             .await?;
         let len = *rows
@@ -2985,7 +2985,7 @@ mod tests {
         let join = left.clone().join_on(
             right.clone(),
             JoinType::Inner,
-            Some(Expr::Literal(ScalarValue::Null)),
+            Some(Expr::from(ScalarValue::Null)),
         )?;
         let expected_plan = "CrossJoin:\
             \n TableScan: a projection=[c1], full_filters=[Boolean(NULL)]\

datafusion/core/src/datasource/listing/helpers.rs (+2, -2)
@@ -868,7 +868,7 @@ mod tests {
         assert_eq!(
             evaluate_partition_prefix(
                 partitions,
-                &[col("a").eq(Expr::Literal(ScalarValue::Date32(Some(3))))],
+                &[col("a").eq(Expr::from(ScalarValue::Date32(Some(3))))],
             ),
             Some(Path::from("a=1970-01-04")),
         );
@@ -877,7 +877,7 @@ mod tests {
         assert_eq!(
             evaluate_partition_prefix(
                 partitions,
-                &[col("a").eq(Expr::Literal(ScalarValue::Date64(Some(
+                &[col("a").eq(Expr::from(ScalarValue::Date64(Some(
                     4 * 24 * 60 * 60 * 1000
                 )))),],
             ),

datafusion/core/src/datasource/listing/table.rs (+1, -1)
@@ -1941,7 +1941,7 @@ mod tests {
         let filter_predicate = Expr::BinaryExpr(BinaryExpr::new(
             Box::new(Expr::Column("column1".into())),
             Operator::GtEq,
-            Box::new(Expr::Literal(ScalarValue::Int32(Some(0)))),
+            Box::new(Expr::from(ScalarValue::Int32(Some(0)))),
         ));
 
         // Create a new batch of data to insert into the table

datafusion/core/src/datasource/physical_plan/parquet/row_filter.rs (+10, -8)
@@ -373,7 +373,7 @@ impl<'schema> TreeNodeRewriter for PushdownChecker<'schema> {
                 //
                 // See comments on `FilterCandidateBuilder` for more information
                 let null_value = ScalarValue::try_from(field.data_type())?;
-                Ok(Transformed::yes(Arc::new(Literal::new(null_value)) as _))
+                Ok(Transformed::yes(Arc::new(Literal::from(null_value)) as _))
             })
             // If the column is not in the table schema, should throw the error
             .map_err(|e| arrow_datafusion_err!(e));
@@ -699,9 +699,10 @@ mod test {
             .expect("expected error free record batch");
 
         // Test all should fail
-        let expr = col("timestamp_col").lt(Expr::Literal(
-            ScalarValue::TimestampNanosecond(Some(1), Some(Arc::from("UTC"))),
-        ));
+        let expr = col("timestamp_col").lt(Expr::from(ScalarValue::TimestampNanosecond(
+            Some(1),
+            Some(Arc::from("UTC")),
+        )));
         let expr = logical2physical(&expr, &table_schema);
         let candidate = FilterCandidateBuilder::new(expr, &file_schema, &table_schema)
             .build(&metadata)
@@ -723,9 +724,10 @@ mod test {
         assert!(matches!(filtered, Ok(a) if a == BooleanArray::from(vec![false; 8])));
 
         // Test all should pass
-        let expr = col("timestamp_col").gt(Expr::Literal(
-            ScalarValue::TimestampNanosecond(Some(0), Some(Arc::from("UTC"))),
-        ));
+        let expr = col("timestamp_col").gt(Expr::from(ScalarValue::TimestampNanosecond(
+            Some(0),
+            Some(Arc::from("UTC")),
+        )));
         let expr = logical2physical(&expr, &table_schema);
         let candidate = FilterCandidateBuilder::new(expr, &file_schema, &table_schema)
             .build(&metadata)
@@ -826,7 +828,7 @@ mod test {
 
         let expr = col("str_col")
             .is_not_null()
-            .or(col("int_col").gt(Expr::Literal(ScalarValue::UInt64(Some(5)))));
+            .or(col("int_col").gt(Expr::from(ScalarValue::UInt64(Some(5)))));
 
         assert!(can_expr_be_pushed_down_with_schemas(
             &expr,

datafusion/core/src/datasource/physical_plan/parquet/row_group_filter.rs (+9, -9)
@@ -1237,10 +1237,10 @@ mod tests {
         .run(
             lit("1").eq(lit("1")).and(
                 col(r#""String""#)
-                    .eq(Expr::Literal(ScalarValue::Utf8View(Some(String::from(
+                    .eq(Expr::from(ScalarValue::Utf8View(Some(String::from(
                         "Hello_Not_Exists",
                     )))))
-                    .or(col(r#""String""#).eq(Expr::Literal(ScalarValue::Utf8View(
+                    .or(col(r#""String""#).eq(Expr::from(ScalarValue::Utf8View(
                         Some(String::from("Hello_Not_Exists2")),
                     )))),
             ),
@@ -1322,15 +1322,15 @@ mod tests {
         // generate pruning predicate `(String = "Hello") OR (String = "the quick") OR (String = "are you")`
         .run(
             col(r#""String""#)
-                .eq(Expr::Literal(ScalarValue::Utf8View(Some(String::from(
+                .eq(Expr::from(ScalarValue::Utf8View(Some(String::from(
                     "Hello",
                 )))))
-                .or(col(r#""String""#).eq(Expr::Literal(ScalarValue::Utf8View(
-                    Some(String::from("the quick")),
-                ))))
-                .or(col(r#""String""#).eq(Expr::Literal(ScalarValue::Utf8View(
-                    Some(String::from("are you")),
-                )))),
+                .or(col(r#""String""#).eq(Expr::from(ScalarValue::Utf8View(Some(
+                    String::from("the quick"),
+                )))))
+                .or(col(r#""String""#).eq(Expr::from(ScalarValue::Utf8View(Some(
+                    String::from("are you"),
+                ))))),
         )
         .await
     }

datafusion/core/src/physical_optimizer/enforce_distribution.rs (+1, -1)
@@ -1722,7 +1722,7 @@ pub(crate) mod tests {
         let predicate = Arc::new(BinaryExpr::new(
             col("c", &schema()).unwrap(),
             Operator::Eq,
-            Arc::new(Literal::new(ScalarValue::Int64(Some(0)))),
+            Arc::new(Literal::from(ScalarValue::Int64(Some(0)))),
         ));
         Arc::new(FilterExec::try_new(predicate, input).unwrap())
     }

datafusion/core/src/physical_optimizer/projection_pushdown.rs (+4, -4)
@@ -2187,7 +2187,7 @@ mod tests {
             Arc::new(Column::new("b_left_inter", 0)),
             Operator::Minus,
             Arc::new(BinaryExpr::new(
-                Arc::new(Literal::new(ScalarValue::Int32(Some(1)))),
+                Arc::new(Literal::from(ScalarValue::Int32(Some(1)))),
                 Operator::Plus,
                 Arc::new(Column::new("a_right_inter", 1)),
             )),
@@ -2301,7 +2301,7 @@ mod tests {
             Arc::new(Column::new("b_left_inter", 0)),
             Operator::Minus,
             Arc::new(BinaryExpr::new(
-                Arc::new(Literal::new(ScalarValue::Int32(Some(1)))),
+                Arc::new(Literal::from(ScalarValue::Int32(Some(1)))),
                 Operator::Plus,
                 Arc::new(Column::new("a_right_inter", 1)),
             )),
@@ -2382,7 +2382,7 @@ mod tests {
             Arc::new(Column::new("b", 7)),
             Operator::Minus,
             Arc::new(BinaryExpr::new(
-                Arc::new(Literal::new(ScalarValue::Int32(Some(1)))),
+                Arc::new(Literal::from(ScalarValue::Int32(Some(1)))),
                 Operator::Plus,
                 Arc::new(Column::new("a", 1)),
             )),
@@ -2410,7 +2410,7 @@ mod tests {
             Arc::new(Column::new("b_left_inter", 0)),
             Operator::Minus,
             Arc::new(BinaryExpr::new(
-                Arc::new(Literal::new(ScalarValue::Int32(Some(1)))),
+                Arc::new(Literal::from(ScalarValue::Int32(Some(1)))),
                 Operator::Plus,
                 Arc::new(Column::new("a_right_inter", 1)),
             )),

datafusion/core/src/physical_optimizer/pruning.rs (+4, -4)
@@ -714,7 +714,7 @@ impl BoolVecBuilder {
 fn is_always_true(expr: &Arc<dyn PhysicalExpr>) -> bool {
     expr.as_any()
         .downcast_ref::<phys_expr::Literal>()
-        .map(|l| matches!(l.value(), ScalarValue::Boolean(Some(true))))
+        .map(|l| matches!(l.scalar().value(), ScalarValue::Boolean(Some(true))))
         .unwrap_or_default()
 }
 
@@ -1300,7 +1300,7 @@ fn build_is_null_column_expr(
             Arc::new(phys_expr::BinaryExpr::new(
                 null_count_column_expr,
                 Operator::Gt,
-                Arc::new(phys_expr::Literal::new(ScalarValue::UInt64(Some(0)))),
+                Arc::new(phys_expr::Literal::from(ScalarValue::UInt64(Some(0)))),
             )) as _
         })
         .ok()
@@ -1328,7 +1328,7 @@ fn build_predicate_expression(
 ) -> Arc<dyn PhysicalExpr> {
     // Returned for unsupported expressions. Such expressions are
     // converted to TRUE.
-    let unhandled = Arc::new(phys_expr::Literal::new(ScalarValue::Boolean(Some(true))));
+    let unhandled = Arc::new(phys_expr::Literal::from(ScalarValue::Boolean(Some(true))));
 
     // predicate expression can only be a binary expression
     let expr_any = expr.as_any();
@@ -1549,7 +1549,7 @@ fn wrap_case_expr(
         Operator::Eq,
         expr_builder.row_count_column_expr()?,
     ));
-    let then = Arc::new(phys_expr::Literal::new(ScalarValue::Boolean(Some(false))));
+    let then = Arc::new(phys_expr::Literal::from(ScalarValue::Boolean(Some(false))));
 
     // CASE WHEN x_null_count = x_row_count THEN false ELSE <statistics_expr> END
     Ok(Arc::new(phys_expr::CaseExpr::try_new(
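
The physical-expression side mirrors the logical change: Literal::new(ScalarValue) becomes Literal::from(ScalarValue), and the inner value is read through the wrapping Scalar. A minimal sketch (not part of the commit; the import path and the scalar()/value() accessors are assumed from the hunks above):

// Hedged sketch (not part of this commit): physical `Literal` access pattern.
use datafusion::physical_expr::expressions::Literal;
use datafusion::scalar::ScalarValue;

fn is_true_literal(lit: &Literal) -> bool {
    // Previously: matches!(lit.value(), ScalarValue::Boolean(Some(true)))
    matches!(lit.scalar().value(), ScalarValue::Boolean(Some(true)))
}

fn true_literal() -> Literal {
    // Previously: Literal::new(ScalarValue::Boolean(Some(true)))
    Literal::from(ScalarValue::Boolean(Some(true)))
}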

datafusion/core/src/physical_planner.rs (+5, -5)
@@ -1423,7 +1423,7 @@ fn get_null_physical_expr_pair(
     let data_type = physical_expr.data_type(input_schema)?;
     let null_value: ScalarValue = (&data_type).try_into()?;
 
-    let null_value = Literal::new(null_value);
+    let null_value = Literal::from(null_value);
     Ok((Arc::new(null_value), physical_name))
 }
 
@@ -2018,7 +2018,7 @@ mod tests {
         // verify that the plan correctly casts u8 to i64
         // the cast from u8 to i64 for literal will be simplified, and get lit(int64(5))
         // the cast here is implicit so has CastOptions with safe=true
-        let expected = "BinaryExpr { left: Column { name: \"c7\", index: 2 }, op: Lt, right: Literal { value: Int64(5) }, fail_on_overflow: false }";
+        let expected = "BinaryExpr { left: Column { name: \"c7\", index: 2 }, op: Lt, right: Literal { scalar: Int64(5) }, fail_on_overflow: false }";
         assert!(format!("{exec_plan:?}").contains(expected));
         Ok(())
     }
@@ -2043,7 +2043,7 @@ mod tests {
             &session_state,
         );
 
-        let expected = r#"Ok(PhysicalGroupBy { expr: [(Column { name: "c1", index: 0 }, "c1"), (Column { name: "c2", index: 1 }, "c2"), (Column { name: "c3", index: 2 }, "c3")], null_expr: [(Literal { value: Utf8(NULL) }, "c1"), (Literal { value: Int64(NULL) }, "c2"), (Literal { value: Int64(NULL) }, "c3")], groups: [[false, false, false], [true, false, false], [false, true, false], [false, false, true], [true, true, false], [true, false, true], [false, true, true], [true, true, true]] })"#;
+        let expected = r#"Ok(PhysicalGroupBy { expr: [(Column { name: "c1", index: 0 }, "c1"), (Column { name: "c2", index: 1 }, "c2"), (Column { name: "c3", index: 2 }, "c3")], null_expr: [(Literal { scalar: Utf8(NULL) }, "c1"), (Literal { scalar: Int64(NULL) }, "c2"), (Literal { scalar: Int64(NULL) }, "c3")], groups: [[false, false, false], [true, false, false], [false, true, false], [false, false, true], [true, true, false], [true, false, true], [false, true, true], [true, true, true]] })"#;
 
         assert_eq!(format!("{cube:?}"), expected);
 
@@ -2070,7 +2070,7 @@ mod tests {
             &session_state,
         );
 
-        let expected = r#"Ok(PhysicalGroupBy { expr: [(Column { name: "c1", index: 0 }, "c1"), (Column { name: "c2", index: 1 }, "c2"), (Column { name: "c3", index: 2 }, "c3")], null_expr: [(Literal { value: Utf8(NULL) }, "c1"), (Literal { value: Int64(NULL) }, "c2"), (Literal { value: Int64(NULL) }, "c3")], groups: [[true, true, true], [false, true, true], [false, false, true], [false, false, false]] })"#;
+        let expected = r#"Ok(PhysicalGroupBy { expr: [(Column { name: "c1", index: 0 }, "c1"), (Column { name: "c2", index: 1 }, "c2"), (Column { name: "c3", index: 2 }, "c3")], null_expr: [(Literal { scalar: Utf8(NULL) }, "c1"), (Literal { scalar: Int64(NULL) }, "c2"), (Literal { scalar: Int64(NULL) }, "c3")], groups: [[true, true, true], [false, true, true], [false, false, true], [false, false, false]] })"#;
 
         assert_eq!(format!("{rollup:?}"), expected);
 
@@ -2254,7 +2254,7 @@ mod tests {
         let execution_plan = plan(&logical_plan).await?;
         // verify that the plan correctly adds cast from Int64(1) to Utf8, and the const will be evaluated.
 
-        let expected = "expr: [(BinaryExpr { left: BinaryExpr { left: Column { name: \"c1\", index: 0 }, op: Eq, right: Literal { value: Utf8(\"a\") }, fail_on_overflow: false }, op: Or, right: BinaryExpr { left: Column { name: \"c1\", index: 0 }, op: Eq, right: Literal { value: Utf8(\"1\") }, fail_on_overflow: false }, fail_on_overflow: false }";
+        let expected = "expr: [(BinaryExpr { left: BinaryExpr { left: Column { name: \"c1\", index: 0 }, op: Eq, right: Literal { scalar: Utf8(\"a\") }, fail_on_overflow: false }, op: Or, right: BinaryExpr { left: Column { name: \"c1\", index: 0 }, op: Eq, right: Literal { scalar: Utf8(\"1\") }, fail_on_overflow: false }, fail_on_overflow: false }";
 
         let actual = format!("{execution_plan:?}");
         assert!(actual.contains(expected), "{}", actual);

datafusion/core/tests/custom_sources_cases/provider_filter_pushdown.rs (+10, -5)
@@ -174,12 +174,17 @@ impl TableProvider for CustomProvider {
         match &filters[0] {
             Expr::BinaryExpr(BinaryExpr { right, .. }) => {
                 let int_value = match &**right {
-                    Expr::Literal(ScalarValue::Int8(Some(i))) => *i as i64,
-                    Expr::Literal(ScalarValue::Int16(Some(i))) => *i as i64,
-                    Expr::Literal(ScalarValue::Int32(Some(i))) => *i as i64,
-                    Expr::Literal(ScalarValue::Int64(Some(i))) => *i,
+                    Expr::Literal(lit_value) => match lit_value.value() {
+                        ScalarValue::Int8(Some(v)) => *v as i64,
+                        ScalarValue::Int16(Some(v)) => *v as i64,
+                        ScalarValue::Int32(Some(v)) => *v as i64,
+                        ScalarValue::Int64(Some(v)) => *v,
+                        other_value => {
+                            return not_impl_err!("Do not support value {other_value:?}");
+                        }
+                    },
                     Expr::Cast(Cast { expr, data_type: _ }) => match expr.deref() {
-                        Expr::Literal(lit_value) => match lit_value {
+                        Expr::Literal(lit_value) => match lit_value.value() {
                             ScalarValue::Int8(Some(v)) => *v as i64,
                             ScalarValue::Int16(Some(v)) => *v as i64,
                             ScalarValue::Int32(Some(v)) => *v as i64,
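
When the literal is bound as a whole rather than destructured, the hunk above reads the inner ScalarValue through an accessor. A minimal sketch of that shape (not part of the commit; it assumes the value() accessor used above):

// Hedged sketch (not part of this commit): binding the literal and reading
// its ScalarValue through the accessor shown in the hunk above.
use datafusion_common::ScalarValue;
use datafusion_expr::Expr;

fn literal_as_i64(expr: &Expr) -> Option<i64> {
    match expr {
        Expr::Literal(lit_value) => match lit_value.value() {
            ScalarValue::Int8(Some(v)) => Some(*v as i64),
            ScalarValue::Int64(Some(v)) => Some(*v),
            _ => None,
        },
        _ => None,
    }
}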

datafusion/core/tests/dataframe/mod.rs (+1, -1)
@@ -1634,7 +1634,7 @@ async fn consecutive_projection_same_schema() -> Result<()> {
 
     // Add `t` column full of nulls
     let df = df
-        .with_column("t", cast(Expr::Literal(ScalarValue::Null), DataType::Int32))
+        .with_column("t", cast(Expr::from(ScalarValue::Null), DataType::Int32))
         .unwrap();
     df.clone().show().await.unwrap();

datafusion/core/tests/expr_api/simplification.rs (+1, -1)
@@ -282,7 +282,7 @@ fn select_date_plus_interval() -> Result<()> {
 
     let date_plus_interval_expr = to_timestamp_expr(ts_string)
         .cast_to(&DataType::Date32, schema)?
-        + Expr::Literal(ScalarValue::IntervalDayTime(Some(IntervalDayTime {
+        + Expr::from(ScalarValue::IntervalDayTime(Some(IntervalDayTime {
             days: 123,
             milliseconds: 0,
         })));

datafusion/core/tests/fuzz_cases/join_fuzz.rs (+1, -1)
@@ -329,7 +329,7 @@ impl JoinFuzzTestCase {
                 filter.schema().fields().len(),
             )
         } else {
-            (Arc::new(Literal::new(ScalarValue::from(true))) as _, 0)
+            (Arc::new(Literal::from(ScalarValue::from(true))) as _, 0)
         };
 
         let equal_a = Arc::new(BinaryExpr::new(

datafusion/core/tests/sql/path_partition.rs (+1, -1)
@@ -91,7 +91,7 @@ async fn parquet_partition_pruning_filter() -> Result<()> {
     let expected = Arc::new(BinaryExpr::new(
         Arc::new(Column::new_with_schema("id", &exec.schema()).unwrap()),
         Operator::Gt,
-        Arc::new(Literal::new(ScalarValue::Int32(Some(1)))),
+        Arc::new(Literal::from(ScalarValue::Int32(Some(1)))),
     ));
 
     assert!(pred.as_any().is::<BinaryExpr>());

0 commit comments
