
Commit

fix typos
Signed-off-by: Ahmed Hussein (amahussein) <[email protected]>
amahussein committed Jan 11, 2024
1 parent 30ac07a commit 30e33af
Showing 2 changed files with 5 additions and 5 deletions.
2 changes: 1 addition & 1 deletion core/pom.xml
@@ -525,7 +525,7 @@
<scope>test</scope>
</dependency>
<dependency>
- <!-- add hive to test against detective hive Ops -->
+ <!-- add hive jars to test hive Ops -->
<groupId>org.apache.spark</groupId>
<artifactId>spark-hive_${scala.binary.version}</artifactId>
<version>${spark.version}</version>
8 changes: 4 additions & 4 deletions
@@ -43,8 +43,8 @@ import org.apache.spark.util.Utils
// Properties stored in this container can be accessed to make decision about
// certain analysis that depends on the context of the Spark properties.
// TODO: we need to migrate SparkProperties, GpuMode to this trait.
- trait CashableProps {
- // A flag wether hive is enabled or not. Note that we assume that the
+ trait CacheableProps {
+ // A flag whether hive is enabled or not. Note that we assume that the
// property is global to the entire application once it is set. a.k.a, it cannot be disabled
// once it is was set to true.
var hiveEnabled = false
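
For context on the flag above: Spark records the active catalog in the spark.sql.catalogImplementation property, which is what the "CatalogImplementation" mentioned later in this diff refers to. Below is a minimal Scala sketch of how hiveEnabled could be derived once from the application's Spark properties; the helper name and the default value are assumptions, not code from this commit. Once the flag evaluates to true it is never reset, matching the comment that the property is global to the entire application.

// Hypothetical helper (not part of this commit): derive the Hive flag from the
// application's Spark properties. spark.sql.catalogImplementation is "hive" when
// the Hive catalog is in use and "in-memory" otherwise.
def isHiveCatalog(sparkProperties: Map[String, String]): Boolean = {
  sparkProperties
    .getOrElse("spark.sql.catalogImplementation", "in-memory")
    .equalsIgnoreCase("hive")
}
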
@@ -61,7 +61,7 @@ trait CashableProps {

abstract class AppBase(
val eventLogInfo: Option[EventLogInfo],
- val hadoopConf: Option[Configuration]) extends Logging with CashableProps {
+ val hadoopConf: Option[Configuration]) extends Logging with CacheableProps {

var sparkVersion: String = ""
var appEndTime: Option[Long] = None
@@ -337,7 +337,7 @@ abstract class AppBase(
readSchema
)
}
- // scan hive has no "ReaSchema" defined. So, we need to look explicitly for nodes
+ // "scan hive" has no "ReadSchema" defined. So, we need to look explicitly for nodes
// that are scan hive and add them one by one to the dataSource
if (hiveEnabled) { // only scan for hive when the CatalogImplementation is using hive
val allPlanWithHiveScan = getPlanInfoWithHiveScan(planInfo)
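
To illustrate the "scan hive" comment above, here is a hedged sketch of what a recursive lookup such as getPlanInfoWithHiveScan could look like: walk the SparkPlanInfo tree and collect every node whose name marks a Hive table scan, since those nodes carry no "ReadSchema" to match on. Only the method name appears in the diff; the match prefix and traversal details below are assumptions.

import org.apache.spark.sql.execution.SparkPlanInfo

// Sketch only: match Hive scan nodes by node name because they expose no ReadSchema.
// The exact "scan hive" prefix is an assumption.
def getPlanInfoWithHiveScan(planInfo: SparkPlanInfo): Seq[SparkPlanInfo] = {
  val childScans = planInfo.children.flatMap(getPlanInfoWithHiveScan)
  if (planInfo.nodeName.toLowerCase.startsWith("scan hive")) {
    planInfo +: childScans
  } else {
    childScans
  }
}

Each matched node can then be added to the dataSource one by one, as the surrounding comment describes.
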

