
Commit

introduce util package
timvw committed Nov 8, 2023
1 parent a4b6700 commit bba670a
Showing 18 changed files with 20 additions and 14 deletions.
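The commit moves the shared file and lookup helpers (DataFile, ManifestFile, LookupFile, LookupDatabase, Product, ...) out of be.icteam.adobe.analytics.datafeed into a new be.icteam.adobe.analytics.datafeed.util package and updates the imports of every consumer. A minimal sketch of the pattern, assuming one of the re-packaged classes (the member shown is illustrative, not taken from the repository):

// After the move, a helper declares the util sub-package...
package be.icteam.adobe.analytics.datafeed.util

import org.apache.hadoop.fs.Path

// Hypothetical shape; the real DataFile's members are not shown in this diff.
case class DataFile(path: Path)

// ...and code in the parent package must now import it explicitly:
// import be.icteam.adobe.analytics.datafeed.util.DataFile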
@@ -1,5 +1,6 @@
 package be.icteam.adobe.analytics.datafeed
 
+import be.icteam.adobe.analytics.datafeed.util.{DataFile, ManifestFile}
 import org.apache.spark.sql.connector.read.InputPartition
 
 case class DatafeedPartition(dataFile: DataFile, manifestFile: ManifestFile, options: DatafeedOptions) extends InputPartition
@@ -1,6 +1,7 @@
 package be.icteam.adobe.analytics.datafeed
 
 import be.icteam.adobe.analytics.datafeed.contributor.ValuesContributor
+import be.icteam.adobe.analytics.datafeed.util.LookupFile
 import com.univocity.parsers.tsv.{TsvParser, TsvParserSettings}
 import org.apache.hadoop.conf.Configuration
 import org.apache.spark.sql.catalyst.InternalRow
@@ -1,5 +1,6 @@
 package be.icteam.adobe.analytics.datafeed
 
+import be.icteam.adobe.analytics.datafeed.util.ManifestFile
 import org.apache.spark.sql.SparkSession
 import org.apache.spark.sql.connector.read._
 import org.apache.spark.sql.types.StructType
@@ -1,5 +1,6 @@
 package be.icteam.adobe.analytics.datafeed
 
+import be.icteam.adobe.analytics.datafeed.util.ManifestFile
 import org.apache.spark.sql.SparkSession
 import org.apache.spark.sql.connector.read.{Scan, ScanBuilder, SupportsPushDownRequiredColumns}
 import org.apache.spark.sql.types.StructType
@@ -1,6 +1,7 @@
 package be.icteam.adobe.analytics.datafeed
 
 import be.icteam.adobe.analytics.datafeed.contributor.ValuesContributor
+import be.icteam.adobe.analytics.datafeed.util.{LookupFile, ManifestFile}
 import org.apache.hadoop.fs.Path
 import org.apache.spark.sql.SparkSession
 import org.apache.spark.sql.connector.catalog.{SupportsRead, Table, TableCapability}
@@ -1,6 +1,6 @@
 package be.icteam.adobe.analytics.datafeed.contributor
 
-import be.icteam.adobe.analytics.datafeed.{LookupDatabase, LookupFile}
+import be.icteam.adobe.analytics.datafeed.util.{LookupDatabase, LookupFile}
 import org.apache.spark.sql.catalyst.expressions.GenericInternalRow
 import org.apache.spark.sql.catalyst.util.ArrayData
 import org.apache.spark.sql.types.{ArrayType, StringType, StructField, StructType}
@@ -1,6 +1,6 @@
 package be.icteam.adobe.analytics.datafeed.contributor
 
-import be.icteam.adobe.analytics.datafeed.Product
+import be.icteam.adobe.analytics.datafeed.util.Product
 import org.apache.spark.sql.catalyst.expressions.GenericInternalRow
 import org.apache.spark.sql.catalyst.util.ArrayData
 import org.apache.spark.sql.types.{ArrayType, StringType, StructField, StructType}
@@ -1,6 +1,6 @@
 package be.icteam.adobe.analytics.datafeed.contributor
 
-import be.icteam.adobe.analytics.datafeed.LookupFile
+import be.icteam.adobe.analytics.datafeed.util.LookupFile
 import com.univocity.parsers.tsv.{TsvParser, TsvParserSettings}
 import org.apache.spark.sql.catalyst.expressions.GenericInternalRow
 import org.apache.spark.sql.types.{StringType, StructField, StructType}
@@ -1,4 +1,4 @@
-package be.icteam.adobe.analytics.datafeed
+package be.icteam.adobe.analytics.datafeed.util
 
 import org.apache.hadoop.fs.Path

@@ -1,4 +1,4 @@
-package be.icteam.adobe.analytics.datafeed
+package be.icteam.adobe.analytics.datafeed.util
 
 import com.univocity.parsers.tsv.{TsvParser, TsvParserSettings}
 import org.rocksdb.{Options, RocksDB}
@@ -1,5 +1,6 @@
-package be.icteam.adobe.analytics.datafeed
+package be.icteam.adobe.analytics.datafeed.util
 
+import be.icteam.adobe.analytics.datafeed.DatafeedOptions
 import com.univocity.parsers.tsv.{TsvParser, TsvParserSettings}
 import org.apache.commons.compress.archivers.tar.TarArchiveInputStream
 import org.apache.commons.compress.compressors.gzip.GzipCompressorInputStream
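Judging from its imports (Commons Compress tar/gzip plus the univocity TSV parser), the helper relocated in this hunk reads tab-separated lookup tables out of gzipped tar archives. A rough, self-contained sketch of that kind of read path, assuming a local .tar.gz file; this is an illustration, not the project's actual implementation:

import java.io.{BufferedInputStream, FileInputStream, StringReader}
import java.nio.charset.StandardCharsets

import com.univocity.parsers.tsv.{TsvParser, TsvParserSettings}
import org.apache.commons.compress.archivers.tar.TarArchiveInputStream
import org.apache.commons.compress.compressors.gzip.GzipCompressorInputStream
import org.apache.commons.compress.utils.IOUtils

object LookupArchiveSketch {

  /** Prints a summary of every .tsv entry found in a gzipped tar archive. */
  def main(args: Array[String]): Unit = {
    val archivePath = args.headOption.getOrElse("lookup_data.tar.gz") // illustrative default
    val tarIn = new TarArchiveInputStream(
      new GzipCompressorInputStream(new BufferedInputStream(new FileInputStream(archivePath))))
    try {
      val parser = new TsvParser(new TsvParserSettings)
      var entry = tarIn.getNextTarEntry
      while (entry != null) {
        if (!entry.isDirectory && entry.getName.endsWith(".tsv")) {
          // IOUtils.toByteArray reads only the bytes of the current tar entry.
          val content = new String(IOUtils.toByteArray(tarIn), StandardCharsets.UTF_8)
          val rows = parser.parseAll(new StringReader(content)) // java.util.List[Array[String]]
          val first = if (rows.isEmpty) "<empty>" else rows.get(0).mkString("|")
          println(s"${entry.getName}: ${rows.size()} rows, first row = $first")
        }
        entry = tarIn.getNextTarEntry
      }
    } finally tarIn.close()
  }
}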
@@ -1,4 +1,4 @@
-package be.icteam.adobe.analytics.datafeed
+package be.icteam.adobe.analytics.datafeed.util
 
 import org.apache.hadoop.conf.Configuration
 import org.apache.hadoop.fs.{FileSystem, Path}
@@ -1,4 +1,4 @@
-package be.icteam.adobe.analytics.datafeed
+package be.icteam.adobe.analytics.datafeed.util
 
 import java.util.regex.Pattern

@@ -1,4 +1,4 @@
-package be.icteam.adobe.analytics.datafeed
+package be.icteam.adobe.analytics.datafeed.util
 
 import org.apache.hadoop.fs.{FileSystem, Path}

@@ -1,4 +1,4 @@
-package be.icteam.adobe.analytics.datafeed
+package be.icteam.adobe.analytics.datafeed.util
 
 import org.apache.hadoop.fs.Path
 import org.scalatest.funsuite.AnyFunSuite
@@ -1,4 +1,4 @@
-package be.icteam.adobe.analytics.datafeed
+package be.icteam.adobe.analytics.datafeed.util
 
 import org.apache.hadoop.conf.Configuration
 import org.apache.hadoop.fs.Path
@@ -1,4 +1,4 @@
-package be.icteam.adobe.analytics.datafeed
+package be.icteam.adobe.analytics.datafeed.util
 
 import org.apache.hadoop.conf.Configuration
 import org.apache.hadoop.fs.Path
@@ -1,4 +1,4 @@
-package be.icteam.adobe.analytics.datafeed
+package be.icteam.adobe.analytics.datafeed.util
 
 import org.scalatest.funsuite.AnyFunSuite

@@ -9,7 +9,7 @@ class ProductParserTests extends AnyFunSuite {
   /* examples from: https://experienceleague.adobe.com/docs/analytics/implementation/vars/page-vars/products.html?lang=en */
   test("parse product") {
 
-    import Product.Parser.parseProducts
+    import be.icteam.adobe.analytics.datafeed.util.Product.Parser.parseProducts
 
     // Include only product and category. Common on individual product pages
     assert(parseProducts("Example category;Example product") == List(
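The test above exercises Product.Parser.parseProducts against examples from the Adobe products-variable documentation, where product entries are comma-separated and the fields inside an entry are semicolon-separated (category;name;quantity;price[;events[;merchandising eVars]]). A stand-alone sketch of that field splitting, kept deliberately simple and not the project's actual parser:

// Hypothetical simplified model: only the first two fields of each entry are kept.
final case class SimpleProduct(category: Option[String], name: Option[String])

object ProductsSketch {
  def parse(products: String): List[SimpleProduct] =
    products.split(",").toList.map { entry =>
      val fields = entry.split(";", -1).map(_.trim)
      def at(i: Int): Option[String] = fields.lift(i).filter(_.nonEmpty)
      SimpleProduct(category = at(0), name = at(1))
    }
}

// ProductsSketch.parse("Example category;Example product")
//   == List(SimpleProduct(Some("Example category"), Some("Example product")))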
