use withTempDir
andygrove committed Feb 16, 2024
1 parent 297d5cf commit e32a40a
Showing 1 changed file with 8 additions and 14 deletions.
22 changes: 8 additions & 14 deletions spark/src/test/scala/org/apache/comet/CometCastSuite.scala
@@ -19,7 +19,7 @@
 
 package org.apache.comet
 
-import java.nio.file.Files
+import java.io.File
 
 import scala.util.Random
 
@@ -31,14 +31,6 @@ import org.apache.spark.sql.types.{DataType, DataTypes}
 class CometCastSuite extends CometTestBase with AdaptiveSparkPlanHelper {
   import testImplicits._
 
-  private lazy val tempDir = {
-    val tmp = Files.createTempDirectory("CometCastSuite")
-    if (!tmp.toFile.exists()) {
-      assert(tmp.toFile.mkdirs())
-    }
-    tmp
-  }
-
   ignore("cast long to short") {
     castTest(generateLongs, DataTypes.ShortType)
   }
@@ -105,13 +97,15 @@ class CometCastSuite extends CometTestBase with AdaptiveSparkPlanHelper {
   }
 
   private def castTest(input: DataFrame, toType: DataType) {
-    val df = roundtripParquet(input)
-      .withColumn("converted", col("a").cast(toType))
-    checkSparkAnswer(df)
+    withTempPath { dir =>
+      val df = roundtripParquet(input, dir)
+        .withColumn("converted", col("a").cast(toType))
+      checkSparkAnswer(df)
+    }
   }
 
-  private def roundtripParquet(df: DataFrame): DataFrame = {
-    val filename = tempDir.resolve(s"castTest_${System.currentTimeMillis()}.parquet").toString
+  private def roundtripParquet(df: DataFrame, tempDir: File): DataFrame = {
+    val filename = new File(tempDir, s"castTest_${System.currentTimeMillis()}.parquet").toString
     df.write.mode(SaveMode.Overwrite).parquet(filename)
     spark.read.parquet(filename)
   }
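This commit replaces the suite's hand-rolled temporary directory, which was created once per suite and never deleted, with the `withTempPath` test helper, so each cast test round-trips its Parquet file through a fresh location that is cleaned up when the block exits. The real helper is provided by the Spark test utilities that `CometTestBase` mixes in; a minimal sketch of the pattern, assuming only the JDK, looks roughly like this:

```scala
import java.io.File
import java.nio.file.Files

// Sketch of a withTempPath-style helper: hand the body a fresh path and
// remove whatever it wrote there, even if the body throws.
def withTempPath(f: File => Unit): Unit = {
  val dir = Files.createTempDirectory("CometCastSuite").toFile
  dir.delete() // pass a path that does not yet exist; the test creates it
  try f(dir)
  finally {
    def deleteRecursively(file: File): Unit = {
      Option(file.listFiles()).foreach(_.foreach(deleteRecursively))
      file.delete()
    }
    deleteRecursively(dir)
  }
}
```

Passing the directory into `roundtripParquet` as a `File` also removes the suite-level `tempDir` state, which is why the `java.nio.file.Files` import gives way to `java.io.File` in the diff above.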
