Write all test files to tmp directory and correctly delete them
Fixes problems with leftovers from previous launches that prevented running
the tests for all Scala versions sequentially (with +test).
akudiyar committed Apr 15, 2023
1 parent 01c2c3a commit 121ff1b
Showing 2 changed files with 13 additions and 10 deletions.
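The change follows a standard pattern: create one unique temporary directory per JVM run, point all on-disk test state at it, and delete it recursively when the shared Spark context stops. The two changed files below are headed by names inferred from their Scala classes, since this view does not preserve full paths. A minimal self-contained sketch of the pattern (the object name and main method are illustrative, not part of the commit):

    import java.nio.file.Files
    import scala.reflect.io.Directory

    object TempWarehouseSketch {

      // Unique per-run directory under java.io.tmpdir; the random suffix
      // avoids collisions with leftovers from previous launches.
      private lazy val warehouseLocation =
        Files.createTempDirectory("spark-warehouse").toFile

      def dbLocation: String = warehouseLocation.getAbsolutePath

      // deleteRecursively returns false if some entry could not be removed.
      def cleanupTempDirectory(): Unit =
        Directory(warehouseLocation).deleteRecursively()

      def main(args: Array[String]): Unit = {
        println(s"test data goes under $dbLocation")
        cleanupTempDirectory()
      }
    }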
SharedSparkContext.scala
@@ -12,7 +12,7 @@ import scala.reflect.io.Directory
 /** Shared Docker container and Spark instance between all tests cases */
 object SharedSparkContext extends Logging {
 
-  private lazy val warehouseLocation = Files.createTempDirectory("spark-wirehouse").toFile
+  private lazy val warehouseLocation = Files.createTempDirectory("spark-warehouse").toFile
 
   private lazy val clusterCookie =
     sys.env.getOrElse("TARANTOOL_CLUSTER_COOKIE", "testapp-cluster-cookie")
@@ -74,9 +74,10 @@
       .config(conf)
       .config("spark.ui.enabled", false)
       .config("spark.sql.warehouse.dir", warehouseLocationPath)
+      .config("hive.metastore.warehouse.dir", warehouseLocationPath)
       .config(
         "javax.jdo.option.ConnectionURL",
-        "jdbc:derby:;databaseName=tarantoolTest;create=true"
+        s"jdbc:derby:;databaseName=$warehouseLocationPath/tarantoolTest;create=true"
       )
 
     if (withHiveSupport)
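With this hunk, both the SQL warehouse and the embedded Derby metastore database live under the same temporary path, so deleting that one directory clears all Hive state from the run. A sketch of the resulting session setup, assuming a local master and Hive support enabled (the wrapper object, method name, and master setting are assumptions for illustration):

    import org.apache.spark.sql.SparkSession

    object TestSessionSketch {

      def buildTestSession(warehouseLocationPath: String): SparkSession =
        SparkSession.builder()
          .master("local[*]")
          .config("spark.ui.enabled", false)
          // Spark's own warehouse property...
          .config("spark.sql.warehouse.dir", warehouseLocationPath)
          // ...and the Hive one, which Hive code may read directly.
          .config("hive.metastore.warehouse.dir", warehouseLocationPath)
          // Embedded Derby metastore placed inside the temp directory,
          // so metastore state is deleted together with it.
          .config(
            "javax.jdo.option.ConnectionURL",
            s"jdbc:derby:;databaseName=$warehouseLocationPath/tarantoolTest;create=true"
          )
          .enableHiveSupport()
          .getOrCreate()
    }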
@@ -96,6 +97,9 @@
     _conf
   }
 
+  def dbLocation: String =
+    warehouseLocation.getAbsolutePath
+
   def sc: SparkContext =
     sparkSession.get().sparkContext
 
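The new dbLocation accessor exposes the temp warehouse path so that tests can build explicit storage locations under it, for example (the variable name is illustrative; the actual call site is in the test diff below):

    val tablePath = s"${SharedSparkContext.dbLocation}/dl_raw.db"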
@@ -107,11 +111,8 @@
     if (sparkSession.compareAndSet(scRef, null)) {
       scRef.stop()
     }
+    cleanupTempDirectory()
   }
 
   def cleanupTempDirectory(): Unit =
     Directory(warehouseLocation).deleteRecursively()
 }
 
   def teardown(): Unit = {
     container.stop()
TarantoolSparkWriteClusterWithHiveTest.scala
@@ -56,8 +56,9 @@ class TarantoolSparkWriteClusterWithHiveTest extends AnyFunSuite with Matchers w
     forAll(variants) { (stopOnError, batchSize) =>
       val space = "reg_numbers"
 
-      SharedSparkContext.spark.sql("create database if not exists dl_raw")
-      SharedSparkContext.spark.sql("drop table if exists DL_RAW.reg_numbers")
+      SharedSparkContext.spark.sql("drop database if exists DL_RAW cascade")
+      SharedSparkContext.spark.sql("create database if not exists DL_RAW")
+      SharedSparkContext.spark.sql("drop table if exists DL_RAW.reg_numbers purge")
 
       SharedSparkContext.spark.sql("""
         |create table if not exists DL_RAW.reg_numbers (
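Two details make this reset robust: drop database ... cascade removes the database even when tables from an aborted run are still left in it, and drop table ... purge deletes the table files immediately instead of moving them to the Hive trash, so no data survives outside the temporary directory.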
@@ -66,11 +67,11 @@
         | ,regnum decimal(38)
         | ) stored as orc""".stripMargin)
       SharedSparkContext.spark.sql(s"""
-        |insert into dl_raw.reg_numbers values
+        |insert into DL_RAW.reg_numbers values
         ${generateRows}
         |""".stripMargin)
 
-      val ds = SharedSparkContext.spark.table("dl_raw.reg_numbers")
+      val ds = SharedSparkContext.spark.table("DL_RAW.reg_numbers")
 
       ds.printSchema()
 
@@ -79,6 +80,7 @@
         .option("tarantool.space", space)
         .option("tarantool.stopOnError", stopOnError)
         .option("tarantool.batchSize", batchSize)
+        .option("path", s"${SharedSparkContext.dbLocation}/dl_raw.db")
         .mode(SaveMode.Overwrite)
         .save()
 
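This is the call site for the dbLocation accessor introduced above. Pinning the path option under the temp warehouse presumably keeps whatever files Spark resolves for the overwritten table inside that directory rather than in a default location outside it; dl_raw.db matches the directory name Hive derives from the lower-cased database name.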