Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
55 changes: 54 additions & 1 deletion native/core/src/execution/expressions/strings.rs
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,7 @@ use datafusion::common::ScalarValue;
use datafusion::physical_expr::expressions::{LikeExpr, Literal};
use datafusion::physical_expr::PhysicalExpr;
use datafusion_comet_proto::spark_expression::Expr;
use datafusion_comet_spark_expr::{FromJson, RLike, SubstringExpr};
use datafusion_comet_spark_expr::{EndsWithExpr, FromJson, RLike, StartsWithExpr, SubstringExpr};

use crate::execution::{
expressions::extract_expr,
Expand Down Expand Up @@ -123,3 +123,56 @@ impl ExpressionBuilder for FromJsonBuilder {
Ok(Arc::new(FromJson::new(child, schema, &expr.timezone)))
}
}

/// Builder for StartsWith expressions
pub struct StartsWithBuilder;

impl ExpressionBuilder for StartsWithBuilder {
    /// Translates a protobuf `StartsWith` node into a [`StartsWithExpr`].
    ///
    /// The right-hand side of the expression must resolve to a string literal;
    /// anything else is rejected by `extract_string_literal`.
    fn build(
        &self,
        spark_expr: &Expr,
        input_schema: SchemaRef,
        planner: &PhysicalPlanner,
    ) -> Result<Arc<dyn PhysicalExpr>, ExecutionError> {
        let starts_with = extract_expr!(spark_expr, StartsWith);
        let child =
            planner.create_expr(starts_with.left.as_ref().unwrap(), Arc::clone(&input_schema))?;
        let pattern_expr = planner.create_expr(starts_with.right.as_ref().unwrap(), input_schema)?;
        Ok(Arc::new(StartsWithExpr::new(
            child,
            extract_string_literal(&pattern_expr)?,
        )))
    }
}

/// Builder for EndsWith expressions
pub struct EndsWithBuilder;

impl ExpressionBuilder for EndsWithBuilder {
    /// Translates a protobuf `EndsWith` node into an [`EndsWithExpr`].
    ///
    /// The right-hand side of the expression must resolve to a string literal;
    /// anything else is rejected by `extract_string_literal`.
    fn build(
        &self,
        spark_expr: &Expr,
        input_schema: SchemaRef,
        planner: &PhysicalPlanner,
    ) -> Result<Arc<dyn PhysicalExpr>, ExecutionError> {
        let ends_with = extract_expr!(spark_expr, EndsWith);
        let child =
            planner.create_expr(ends_with.left.as_ref().unwrap(), Arc::clone(&input_schema))?;
        let pattern_expr = planner.create_expr(ends_with.right.as_ref().unwrap(), input_schema)?;
        Ok(Arc::new(EndsWithExpr::new(
            child,
            extract_string_literal(&pattern_expr)?,
        )))
    }
}

/// Helper function to extract a string literal from a physical expression.
///
/// Used by the StartsWith/EndsWith builders, which require the pattern side
/// of the expression to be a constant string. Accepts any of Arrow's string
/// scalar encodings (`Utf8`, `LargeUtf8`, `Utf8View`) rather than only
/// `Utf8`, so plans produced with large/view string types still work.
///
/// Returns `ExecutionError::GeneralError` if the expression is not a literal
/// or is a literal of a non-string (or null) type.
fn extract_string_literal(expr: &Arc<dyn PhysicalExpr>) -> Result<String, ExecutionError> {
    match expr.as_any().downcast_ref::<Literal>() {
        Some(literal) => match literal.value() {
            // All three Arrow string encodings carry a Rust `String` payload.
            ScalarValue::Utf8(Some(s))
            | ScalarValue::LargeUtf8(Some(s))
            | ScalarValue::Utf8View(Some(s)) => Ok(s.clone()),
            // Include the offending value in the error to aid debugging.
            other => Err(ExecutionError::GeneralError(format!(
                "StartsWith/EndsWith pattern must be a string literal, got: {other}"
            ))),
        },
        None => Err(ExecutionError::GeneralError(
            "StartsWith/EndsWith pattern must be a literal".to_string(),
        )),
    }
}
10 changes: 10 additions & 0 deletions native/core/src/execution/planner/expression_registry.rs
Original file line number Diff line number Diff line change
Expand Up @@ -84,6 +84,8 @@ pub enum ExpressionType {
In,
If,
Substring,
StartsWith,
EndsWith,
Like,
Rlike,
CheckOverflow,
Expand Down Expand Up @@ -278,6 +280,11 @@ impl ExpressionRegistry {

self.builders
.insert(ExpressionType::Substring, Box::new(SubstringBuilder));
self.builders
.insert(ExpressionType::StartsWith, Box::new(StartsWithBuilder));
self.builders
.insert(ExpressionType::EndsWith, Box::new(EndsWithBuilder));

self.builders
.insert(ExpressionType::Like, Box::new(LikeBuilder));
self.builders
Expand Down Expand Up @@ -327,6 +334,9 @@ impl ExpressionRegistry {
Some(ExprStruct::In(_)) => Ok(ExpressionType::In),
Some(ExprStruct::If(_)) => Ok(ExpressionType::If),
Some(ExprStruct::Substring(_)) => Ok(ExpressionType::Substring),
Some(ExprStruct::StartsWith(_)) => Ok(ExpressionType::StartsWith),
Some(ExprStruct::EndsWith(_)) => Ok(ExpressionType::EndsWith),

Some(ExprStruct::Like(_)) => Ok(ExpressionType::Like),
Some(ExprStruct::Rlike(_)) => Ok(ExpressionType::Rlike),
Some(ExprStruct::CheckOverflow(_)) => Ok(ExpressionType::CheckOverflow),
Expand Down
2 changes: 2 additions & 0 deletions native/proto/src/proto/expr.proto
Original file line number Diff line number Diff line change
Expand Up @@ -86,6 +86,8 @@ message Expr {
EmptyExpr spark_partition_id = 63;
EmptyExpr monotonically_increasing_id = 64;
FromJson from_json = 89;
BinaryExpr starts_with = 90;
BinaryExpr ends_with = 91;
}
}

Expand Down
2 changes: 2 additions & 0 deletions native/spark-expr/src/string_funcs/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -15,8 +15,10 @@
// specific language governing permissions and limitations
// under the License.

mod starts_ends_with;
mod string_space;
mod substring;

pub use starts_ends_with::{EndsWithExpr, StartsWithExpr};
pub use string_space::SparkStringSpace;
pub use substring::SubstringExpr;
270 changes: 270 additions & 0 deletions native/spark-expr/src/string_funcs/starts_ends_with.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,270 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.

use arrow::array::{Array, BooleanArray, Scalar, StringArray};
use arrow::buffer::BooleanBuffer;
use arrow::compute;
use arrow::datatypes::DataType;
use datafusion::common::{Result, ScalarValue};
use datafusion::logical_expr::ColumnarValue;
use datafusion::physical_expr::PhysicalExpr;
use std::any::Any;
use std::fmt::{Debug, Display, Formatter};
use std::hash::{Hash, Hasher};
use std::sync::Arc;

/// Physical expression implementing Spark's `startsWith` against a constant
/// pattern. Evaluates to a boolean column.
#[derive(Debug)]
pub struct StartsWithExpr {
    pub child: Arc<dyn PhysicalExpr>,
    pub pattern_array: Arc<StringArray>, // Pre-allocated pattern
}

impl StartsWithExpr {
    /// Creates a new `StartsWithExpr` that tests whether values produced by
    /// `child` start with `pattern`.
    pub fn new(child: Arc<dyn PhysicalExpr>, pattern: String) -> Self {
        // The single-element pattern array is materialized once here so that
        // `evaluate` can reuse it for every batch instead of allocating a new
        // StringArray each time.
        Self {
            child,
            pattern_array: Arc::new(StringArray::from(vec![pattern])),
        }
    }
}

impl Hash for StartsWithExpr {
    // Hash the child expression and the pattern string. Index 0 is always
    // valid because `new` stores the pattern as a single-element array.
    fn hash<H: Hasher>(&self, state: &mut H) {
        self.child.hash(state);
        self.pattern_array.value(0).hash(state);
    }
}

impl PartialEq for StartsWithExpr {
    // Two StartsWith expressions are equal iff they have equal children and
    // the same pattern string; the backing arrays need not be the same Arc.
    fn eq(&self, other: &Self) -> bool {
        self.child.eq(&other.child) && self.pattern_array.value(0) == other.pattern_array.value(0)
    }
}

impl Eq for StartsWithExpr {}

impl Display for StartsWithExpr {
    // Renders as `startsWith(child, "pattern")` for plan/debug output.
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        write!(
            f,
            "startsWith({}, \"{}\")",
            self.child,
            self.pattern_array.value(0)
        )
    }
}

impl PhysicalExpr for StartsWithExpr {
    fn as_any(&self) -> &dyn Any {
        self
    }

    // NOTE(review): panics if DataFusion ever asks for a SQL rendering of
    // this expression — confirm callers never hit this path.
    fn fmt_sql(&self, _: &mut Formatter<'_>) -> std::fmt::Result {
        unimplemented!()
    }

    // `startsWith` always produces a boolean, regardless of input schema.
    fn data_type(&self, _input_schema: &arrow::datatypes::Schema) -> Result<DataType> {
        Ok(DataType::Boolean)
    }

    // Result is null exactly where the input string is null.
    fn nullable(&self, input_schema: &arrow::datatypes::Schema) -> Result<bool> {
        self.child.nullable(input_schema)
    }

    /// Evaluates `startsWith(child, pattern)` over a record batch.
    ///
    /// Array inputs go through Arrow's vectorized `starts_with` kernel using
    /// the pattern array pre-allocated in `new`; scalar `Utf8` inputs fall
    /// back to `str::starts_with`, and a null scalar yields a null boolean.
    fn evaluate(&self, batch: &arrow::record_batch::RecordBatch) -> Result<ColumnarValue> {
        let arg = self.child.evaluate(batch)?;

        match arg {
            ColumnarValue::Array(array) => {
                // Zero-Allocation here: We reuse the pre-allocated pattern_array
                let scalar = Scalar::new(self.pattern_array.as_ref());

                // Arrow's vectorized starts_with kernel; errors (rather than
                // panics) if the column is not a compatible string type.
                let result = compute::starts_with(&array, &scalar)?;

                Ok(ColumnarValue::Array(Arc::new(result)))
            }
            ColumnarValue::Scalar(ScalarValue::Utf8(Some(str_val))) => {
                // Fallback for scalar inputs (rare in big data, but necessary)
                // NOTE(review): only `Utf8` scalars are handled here; a
                // `LargeUtf8`/`Utf8View` scalar falls through to the error arm
                // below — confirm planner never produces those here.
                let pattern_scalar = self.pattern_array.value(0);
                Ok(ColumnarValue::Scalar(ScalarValue::Boolean(Some(
                    str_val.starts_with(pattern_scalar),
                ))))
            }
            ColumnarValue::Scalar(ScalarValue::Utf8(None)) => {
                // NULL input propagates to a NULL boolean result.
                Ok(ColumnarValue::Scalar(ScalarValue::Boolean(None)))
            }
            _ => Err(datafusion::error::DataFusionError::Internal(
                "StartsWith requires StringArray input".to_string(),
            )),
        }
    }

    fn children(&self) -> Vec<&Arc<dyn PhysicalExpr>> {
        vec![&self.child]
    }

    // Rebuilds the expression with a replacement child, carrying the pattern
    // over. Assumes `children` is non-empty (standard DataFusion contract);
    // indexing panics otherwise.
    fn with_new_children(
        self: Arc<Self>,
        children: Vec<Arc<dyn PhysicalExpr>>,
    ) -> Result<Arc<dyn PhysicalExpr>> {
        Ok(Arc::new(StartsWithExpr::new(
            Arc::clone(&children[0]),
            self.pattern_array.value(0).to_string(),
        )))
    }
}

// ----------------------------------------------------------------------------
// ENDS WITH IMPLEMENTATION
// ----------------------------------------------------------------------------

/// Physical expression implementing Spark's `endsWith` against a constant
/// pattern. Evaluates to a boolean column.
#[derive(Debug)]
pub struct EndsWithExpr {
    pub child: Arc<dyn PhysicalExpr>,
    pub pattern: String, // Keep pattern as String for raw byte access
    pub pattern_len: usize, // Pre-calculate length
}

impl EndsWithExpr {
    /// Creates a new `EndsWithExpr` that tests whether values produced by
    /// `child` end with `pattern`.
    pub fn new(child: Arc<dyn PhysicalExpr>, pattern: String) -> Self {
        Self {
            // Cache the byte length before moving `pattern` into the struct.
            pattern_len: pattern.len(),
            pattern,
            child,
        }
    }
}

impl Hash for EndsWithExpr {
    // Hash the child expression and the pattern string; `pattern_len` is
    // derived from `pattern`, so it does not need separate hashing.
    fn hash<H: Hasher>(&self, state: &mut H) {
        self.child.hash(state);
        self.pattern.hash(state);
    }
}

impl PartialEq for EndsWithExpr {
    // Equality on child and pattern is sufficient: `pattern_len` is always
    // `pattern.len()` by construction.
    fn eq(&self, other: &Self) -> bool {
        self.child.eq(&other.child) && self.pattern == other.pattern
    }
}

impl Eq for EndsWithExpr {}

impl Display for EndsWithExpr {
    // Renders as `endsWith(child, "pattern")` for plan/debug output.
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        write!(f, "endsWith({}, \"{}\")", self.child, self.pattern)
    }
}

impl PhysicalExpr for EndsWithExpr {
fn as_any(&self) -> &dyn Any {
self
}

fn fmt_sql(&self, _: &mut Formatter<'_>) -> std::fmt::Result {
unimplemented!()
}

fn data_type(&self, _input_schema: &arrow::datatypes::Schema) -> Result<DataType> {
Ok(DataType::Boolean)
}

fn nullable(&self, input_schema: &arrow::datatypes::Schema) -> Result<bool> {
self.child.nullable(input_schema)
}

fn evaluate(&self, batch: &arrow::record_batch::RecordBatch) -> Result<ColumnarValue> {
let arg = self.child.evaluate(batch)?;

match arg {
ColumnarValue::Array(array) => {
let string_array = array.as_any().downcast_ref::<StringArray>().unwrap();
let len = string_array.len();

let offsets = string_array.value_offsets();
let values = string_array.value_data();
let pattern_bytes = self.pattern.as_bytes();
let p_len = self.pattern_len;

let mut buffer = Vec::with_capacity(len.div_ceil(8));
let mut current_byte: u8 = 0;
let mut bit_mask: u8 = 1;

for i in 0..len {
let start = offsets[i] as usize;
let end = offsets[i + 1] as usize;
let str_len = end - start;

let is_match = if str_len >= p_len {
let tail_start = end - p_len;
&values[tail_start..end] == pattern_bytes
} else {
false
};

if is_match {
current_byte |= bit_mask;
}

bit_mask = bit_mask.rotate_left(1);
if bit_mask == 1 {
buffer.push(current_byte);
current_byte = 0;
}
}

if bit_mask != 1 {
buffer.push(current_byte);
}

let nulls = string_array.nulls().cloned();
let boolean_buffer = BooleanBuffer::new(buffer.into(), 0, len);
let result_array = BooleanArray::new(boolean_buffer, nulls);

Ok(ColumnarValue::Array(Arc::new(result_array)))
}
ColumnarValue::Scalar(ScalarValue::Utf8(Some(str_val))) => Ok(ColumnarValue::Scalar(
ScalarValue::Boolean(Some(str_val.ends_with(&self.pattern))),
)),
ColumnarValue::Scalar(ScalarValue::Utf8(None)) => {
Ok(ColumnarValue::Scalar(ScalarValue::Boolean(None)))
}
_ => Err(datafusion::error::DataFusionError::Internal(
"EndsWith requires StringArray input".to_string(),
)),
}
}

fn children(&self) -> Vec<&Arc<dyn PhysicalExpr>> {
vec![&self.child]
}

fn with_new_children(
self: Arc<Self>,
children: Vec<Arc<dyn PhysicalExpr>>,
) -> Result<Arc<dyn PhysicalExpr>> {
Ok(Arc::new(EndsWithExpr::new(
Arc::clone(&children[0]),
self.pattern.clone(),
)))
}
}
Loading