Skip to content

Commit

Permalink
fix: Cast string to boolean not compatible with Spark
Browse files Browse the repository at this point in the history
  • Loading branch information
erenavsarogullari committed Feb 24, 2024
1 parent ac3e0a7 commit 4c2eecd
Show file tree
Hide file tree
Showing 2 changed files with 80 additions and 4 deletions.
35 changes: 32 additions & 3 deletions core/src/execution/datafusion/expressions/cast.rs
Original file line number Diff line number Diff line change
Expand Up @@ -27,7 +27,7 @@ use arrow::{
record_batch::RecordBatch,
util::display::FormatOptions,
};
use arrow_array::ArrayRef;
use arrow_array::{Array, ArrayRef, BooleanArray, GenericStringArray, OffsetSizeTrait};
use arrow_schema::{DataType, Schema};
use datafusion::logical_expr::ColumnarValue;
use datafusion_common::{Result as DataFusionResult, ScalarValue};
Expand Down Expand Up @@ -75,8 +75,37 @@ impl Cast {
/// Casts `array` to `self.data_type`, applying Spark-compatible semantics
/// where Arrow's default cast behavior differs (currently: string -> boolean).
///
/// The array is first normalized for the session timezone, then dispatched
/// either to the Spark-specific string->boolean kernel or to Arrow's generic
/// cast kernel, and finally post-processed by `spark_cast`.
fn cast_array(&self, array: ArrayRef) -> DataFusionResult<ArrayRef> {
    let array = array_with_timezone(array, self.timezone.clone(), Some(&self.data_type));
    let from_type = array.data_type();
    let to_type = &self.data_type;

    // String -> boolean must follow Spark's truth-value table, which is
    // stricter than Arrow's; every other combination goes through Arrow.
    let cast_result = match (from_type, to_type) {
        (DataType::Utf8, DataType::Boolean) => {
            Self::spark_cast_utf8_to_boolean::<i32>(&array)
        }
        (DataType::LargeUtf8, DataType::Boolean) => {
            Self::spark_cast_utf8_to_boolean::<i64>(&array)
        }
        _ => cast_with_options(&array, to_type, &CAST_OPTIONS)?,
    };
    Ok(spark_cast(cast_result, from_type, to_type))
}

/// Casts a UTF-8 string array to a boolean array using Spark's semantics.
///
/// Accepted truthy values: "t", "true", "y", "yes", "1"; falsy values:
/// "f", "false", "n", "no", "0" — case-insensitively, after trimming
/// whitespace. Any other string (and any null) maps to null, matching
/// Spark rather than Arrow (Arrow also accepts prefixes like "tr"/"fa").
///
/// # Panics
/// Panics if `from` is not a `GenericStringArray<OffsetSize>`; the caller
/// (`cast_array`) guarantees this by matching on the source data type.
fn spark_cast_utf8_to_boolean<OffsetSize>(from: &dyn Array) -> ArrayRef
where
    OffsetSize: OffsetSizeTrait,
{
    let array = from
        .as_any()
        .downcast_ref::<GenericStringArray<OffsetSize>>()
        .expect("spark_cast_utf8_to_boolean called with a non-string array");

    let output_array = array
        .iter()
        .map(|value| {
            // Trim before lowercasing: ASCII lowercasing never alters
            // whitespace, so the result is identical but we only allocate
            // a lowercase copy of the trimmed content.
            value.and_then(|v| match v.trim().to_ascii_lowercase().as_str() {
                "t" | "true" | "y" | "yes" | "1" => Some(true),
                "f" | "false" | "n" | "no" | "0" => Some(false),
                _ => None,
            })
        })
        .collect::<BooleanArray>();

    Arc::new(output_array)
}
}

Expand Down
49 changes: 48 additions & 1 deletion spark/src/test/scala/org/apache/comet/exec/CometExecSuite.scala
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,7 @@
package org.apache.comet.exec

import scala.collection.JavaConverters._
import scala.collection.convert.ImplicitConversions.`collection AsScalaIterable`
import scala.collection.mutable
import scala.util.Random

Expand All @@ -37,9 +38,10 @@ import org.apache.spark.sql.execution.{CollectLimitExec, ProjectExec, UnionExec}
import org.apache.spark.sql.execution.exchange.BroadcastExchangeExec
import org.apache.spark.sql.execution.joins.{BroadcastNestedLoopJoinExec, CartesianProductExec, SortMergeJoinExec}
import org.apache.spark.sql.execution.window.WindowExec
import org.apache.spark.sql.functions.{date_add, expr}
import org.apache.spark.sql.functions.{col, date_add, expr}
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.internal.SQLConf.SESSION_LOCAL_TIMEZONE
import org.apache.spark.sql.types.DataTypes
import org.apache.spark.unsafe.types.UTF8String

import org.apache.comet.CometConf
Expand Down Expand Up @@ -218,6 +220,51 @@ class CometExecSuite extends CometTestBase {
}
}

test("test cast utf8 to boolean as compatible with Spark") {
  withSQLConf(
    CometConf.COMET_ENABLED.key -> "true",
    CometConf.COMET_EXEC_ALL_OPERATOR_ENABLED.key -> "true") {
    withTable("test_table1", "test_table2", "test_table3", "test_table4") {
      // Writes `inputs` to a parquet table, casts column c1 to boolean, and
      // asserts every resulting row equals `expected` (true / false / null).
      def checkUtf8ToBooleanCast(table: String, inputs: Seq[String], expected: Any): Unit = {
        inputs.toDF("c1").write.format("parquet").saveAsTable(table)
        val rows = this.spark
          .table(table)
          .withColumn("converted", col("c1").cast(DataTypes.BooleanType))
          .collectAsList()
          .toList
        rows.foreach(row => assert(row.get(1) == expected))
      }

      // Boolean values accepted as true by both Arrow and Spark
      checkUtf8ToBooleanCast(
        "test_table1",
        Seq("t", "true", "y", "yes", "1", "T", "TrUe", "Y", "YES"),
        expected = true)

      // Boolean values accepted as false by both Arrow and Spark
      checkUtf8ToBooleanCast(
        "test_table2",
        Seq("f", "false", "n", "no", "0", "F", "FaLSe", "N", "No"),
        expected = false)

      // Values Arrow would accept (prefix matching) but Spark rejects
      checkUtf8ToBooleanCast(
        "test_table3",
        Seq("TR", "FA", "tr", "tru", "ye", "on", "fa", "fal", "fals", "of", "off"),
        expected = null)

      // Values invalid for both Arrow and Spark
      checkUtf8ToBooleanCast("test_table4", Seq("car", "Truck"), expected = null)
    }
  }
}

test(
"fix: ReusedExchangeExec + CometShuffleExchangeExec under QueryStageExec " +
"should be CometRoot") {
Expand Down

0 comments on commit 4c2eecd

Please sign in to comment.