From 01c2c3a44c7bf0551188ece36467c75aff803fc4 Mon Sep 17 00:00:00 2001
From: Alexey Kuzin
Date: Mon, 10 Apr 2023 18:35:45 -0400
Subject: [PATCH 1/2] Fix crossScalaVersions in release settings

---
 build.sbt | 1 -
 1 file changed, 1 deletion(-)

diff --git a/build.sbt b/build.sbt
index 7ddd8da..1fb4d7b 100644
--- a/build.sbt
+++ b/build.sbt
@@ -128,7 +128,6 @@ lazy val root = (project in file("."))
     },
     publishMavenStyle := true,
     // Release settings
-    crossScalaVersions := Nil,
     releaseProcess := Seq[ReleaseStep](
       checkSnapshotDependencies,
       inquireVersions,

From 121ff1b343bae7e2c649ced3d118e8dd12b8f539 Mon Sep 17 00:00:00 2001
From: Alexey Kuzin
Date: Sat, 15 Apr 2023 09:20:54 -0400
Subject: [PATCH 2/2] Write all test files to tmp directory and correctly
 delete them

Fixes problems with leftovers from previous launches preventing running
tests for all Scala versions sequentially (with +test)
---
 .../connector/integration/SharedSparkContext.scala           | 13 +++++++------
 .../TarantoolSparkWriteClusterWithHiveTest.scala             | 10 ++++++----
 2 files changed, 13 insertions(+), 10 deletions(-)

diff --git a/src/test/scala/io/tarantool/spark/connector/integration/SharedSparkContext.scala b/src/test/scala/io/tarantool/spark/connector/integration/SharedSparkContext.scala
index 2f9c117..e5cbd45 100644
--- a/src/test/scala/io/tarantool/spark/connector/integration/SharedSparkContext.scala
+++ b/src/test/scala/io/tarantool/spark/connector/integration/SharedSparkContext.scala
@@ -12,7 +12,7 @@ import scala.reflect.io.Directory
 
 /** Shared Docker container and Spark instance between all tests cases */
 object SharedSparkContext extends Logging {
-  private lazy val warehouseLocation = Files.createTempDirectory("spark-wirehouse").toFile
+  private lazy val warehouseLocation = Files.createTempDirectory("spark-warehouse").toFile
 
   private lazy val clusterCookie =
     sys.env.getOrElse("TARANTOOL_CLUSTER_COOKIE", "testapp-cluster-cookie")
@@ -74,9 +74,10 @@ object SharedSparkContext extends Logging {
       .config(conf)
       .config("spark.ui.enabled", false)
       .config("spark.sql.warehouse.dir", warehouseLocationPath)
+      .config("hive.metastore.warehouse.dir", warehouseLocationPath)
       .config(
         "javax.jdo.option.ConnectionURL",
-        "jdbc:derby:;databaseName=tarantoolTest;create=true"
+        s"jdbc:derby:;databaseName=$warehouseLocationPath/tarantoolTest;create=true"
       )
 
     if (withHiveSupport)
@@ -96,6 +97,9 @@ object SharedSparkContext extends Logging {
     _conf
   }
 
+  def dbLocation: String =
+    warehouseLocation.getAbsolutePath
+
   def sc: SparkContext =
     sparkSession.get().sparkContext
 
@@ -107,11 +111,8 @@ object SharedSparkContext extends Logging {
     if (sparkSession.compareAndSet(scRef, null)) {
       scRef.stop()
     }
-    cleanupTempDirectory()
-  }
-
-  def cleanupTempDirectory(): Unit =
     Directory(warehouseLocation).deleteRecursively()
+  }
 
   def teardown(): Unit = {
     container.stop()

diff --git a/src/test/scala/io/tarantool/spark/connector/integration/TarantoolSparkWriteClusterWithHiveTest.scala b/src/test/scala/io/tarantool/spark/connector/integration/TarantoolSparkWriteClusterWithHiveTest.scala
index 5fb8a9d..e74de01 100644
--- a/src/test/scala/io/tarantool/spark/connector/integration/TarantoolSparkWriteClusterWithHiveTest.scala
+++ b/src/test/scala/io/tarantool/spark/connector/integration/TarantoolSparkWriteClusterWithHiveTest.scala
@@ -56,8 +56,9 @@ class TarantoolSparkWriteClusterWithHiveTest extends AnyFunSuite with Matchers w
     forAll(variants) { (stopOnError, batchSize) =>
       val space = "reg_numbers"
 
-      SharedSparkContext.spark.sql("create database if not exists dl_raw")
SharedSparkContext.spark.sql("drop table if exists DL_RAW.reg_numbers") + SharedSparkContext.spark.sql("drop database if exists DL_RAW cascade") + SharedSparkContext.spark.sql("create database if not exists DL_RAW") + SharedSparkContext.spark.sql("drop table if exists DL_RAW.reg_numbers purge") SharedSparkContext.spark.sql(""" |create table if not exists DL_RAW.reg_numbers ( @@ -66,11 +67,11 @@ class TarantoolSparkWriteClusterWithHiveTest extends AnyFunSuite with Matchers w | ,regnum decimal(38) | ) stored as orc""".stripMargin) SharedSparkContext.spark.sql(s""" - |insert into dl_raw.reg_numbers values + |insert into DL_RAW.reg_numbers values ${generateRows} |""".stripMargin) - val ds = SharedSparkContext.spark.table("dl_raw.reg_numbers") + val ds = SharedSparkContext.spark.table("DL_RAW.reg_numbers") ds.printSchema() @@ -79,6 +80,7 @@ class TarantoolSparkWriteClusterWithHiveTest extends AnyFunSuite with Matchers w .option("tarantool.space", space) .option("tarantool.stopOnError", stopOnError) .option("tarantool.batchSize", batchSize) + .option("path", s"${SharedSparkContext.dbLocation}/dl_raw.db") .mode(SaveMode.Overwrite) .save()