Skip to content

Commit

Permalink
rename packages to be.icteam
Browse files Browse the repository at this point in the history
  • Loading branch information
timvw committed Nov 6, 2023
1 parent 0ba4368 commit eadf0c9
Show file tree
Hide file tree
Showing 24 changed files with 28 additions and 28 deletions.
8 changes: 4 additions & 4 deletions README.md
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
[![Maven Central](https://maven-badges.herokuapp.com/maven-central/be.timvw/adobe-analytics-datafeed-datasource_2.12/badge.svg)](https://central.sonatype.com/artifact/be.timvw/adobe-analytics-datafeed-datasource_2.12)
[![Maven Central](https://maven-badges.herokuapp.com/maven-central/be.icteam/adobe-analytics-datafeed-datasource_2.12/badge.svg)](https://central.sonatype.com/artifact/be.icteam/adobe-analytics-datafeed-datasource_2.12)

# Datasource for Adobe Analytics Data Feed

Expand All @@ -12,14 +12,14 @@ Concretely, escaped values are not handled correctly by a CSV parser due to inher
Make sure the package is in the classpath, eg: by using the --packages option:

```bash
spark-shell --packages "be.timvw:adobe-analytics-datafeed-datasource_2.12:0.0.1"
spark-shell --packages "be.icteam:adobe-analytics-datafeed-datasource_2.12:0.0.1"
```

And you can read the feed as follows:

```scala
val df = spark.read
.format("be.timvw.adobe.analytics.datafeed")
.format("be.icteam.adobe.analytics.datafeed")
.load("./src/test/resources/randyzwitch")
```

Expand All @@ -40,7 +40,7 @@ We also support the Generic file source options:

```scala
val df = spark.read
.format("be.timvw.adobe.analytics.datafeed")
.format("be.icteam.adobe.analytics.datafeed")
.option(ClickstreamOptions.MODIFIED_AFTER, "2023-11-01T00:00:00")
.load("./src/test/resources/randyzwitch")
```
2 changes: 1 addition & 1 deletion build.sbt
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@ lazy val scala212 = "2.12.18"

lazy val supportedScalaVersions = List(scala212)

organization := "be.timvw"
organization := "be.icteam"
name := "adobe-analytics-datafeed-datasource"

ThisBuild / homepage := Some(url("https://github.com/timvw/adobe-analytics-datafeed-datasource"))
Expand Down
Original file line number Diff line number Diff line change
@@ -1 +1 @@
be.timvw.adobe.analytics.datafeed.DefaultSource
be.icteam.adobe.analytics.datafeed.DefaultSource
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
package be.timvw.adobe.analytics.datafeed
package be.icteam.adobe.analytics.datafeed

import org.apache.hadoop.fs.Path

Expand Down
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
package be.timvw.adobe.analytics.datafeed
package be.icteam.adobe.analytics.datafeed

import org.apache.spark.internal.Logging
import org.apache.spark.sql.catalyst.util.{CaseInsensitiveMap, DateTimeUtils}
Expand Down
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
package be.timvw.adobe.analytics.datafeed
package be.icteam.adobe.analytics.datafeed

import org.apache.spark.sql.connector.read.InputPartition

Expand Down
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
package be.timvw.adobe.analytics.datafeed
package be.icteam.adobe.analytics.datafeed

import com.univocity.parsers.tsv.{TsvParser, TsvParserSettings}
import org.apache.hadoop.conf.Configuration
Expand Down
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
package be.timvw.adobe.analytics.datafeed
package be.icteam.adobe.analytics.datafeed

import org.apache.spark.broadcast.Broadcast
import org.apache.spark.sql.catalyst.InternalRow
Expand Down
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
package be.timvw.adobe.analytics.datafeed
package be.icteam.adobe.analytics.datafeed

import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.connector.read._
Expand Down
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
package be.timvw.adobe.analytics.datafeed
package be.icteam.adobe.analytics.datafeed

import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.connector.read.{Scan, ScanBuilder, SupportsPushDownRequiredColumns}
Expand Down
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
package be.timvw.adobe.analytics.datafeed
package be.icteam.adobe.analytics.datafeed

import org.apache.hadoop.fs.Path
import org.apache.spark.sql.SparkSession
Expand Down
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
package be.timvw.adobe.analytics.datafeed
package be.icteam.adobe.analytics.datafeed

import org.apache.spark.sql.catalyst.util.CaseInsensitiveMap
import org.apache.spark.sql.connector.catalog.Table
Expand Down
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
package be.timvw.adobe.analytics.datafeed
package be.icteam.adobe.analytics.datafeed

import com.univocity.parsers.tsv.{TsvParser, TsvParserSettings}
import org.apache.commons.compress.archivers.tar.TarArchiveInputStream
Expand Down
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
package be.timvw.adobe.analytics.datafeed
package be.icteam.adobe.analytics.datafeed

import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.{FileSystem, Path}
Expand Down
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
package be.timvw.adobe.analytics.datafeed
package be.icteam.adobe.analytics.datafeed

import com.univocity.parsers.tsv.{TsvParser, TsvParserSettings}
import org.apache.spark.sql.catalyst.expressions.GenericInternalRow
Expand Down
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
package be.timvw.adobe.analytics.datafeed
package be.icteam.adobe.analytics.datafeed

import org.apache.spark.sql.catalyst.expressions.GenericInternalRow
import org.apache.spark.sql.types.{StructField, StructType}
Expand Down
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
package be.timvw.adobe.analytics.datafeed
package be.icteam.adobe.analytics.datafeed

import org.apache.hadoop.fs.{FileSystem, Path}

Expand Down
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
package be.timvw.adobe.analytics.datafeed
package be.icteam.adobe.analytics.datafeed

import org.apache.spark.sql.catalyst.expressions.GenericInternalRow
import org.apache.spark.sql.types.{StructField, StructType}
Expand Down
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
package be.timvw.adobe.analytics.datafeed
package be.icteam.adobe.analytics.datafeed

import org.apache.spark.sql.catalyst.util.CaseInsensitiveMap
import org.apache.spark.sql.streaming.DataStreamReader
Expand Down
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
package be.timvw.adobe.analytics.datafeed
package be.icteam.adobe.analytics.datafeed

import org.apache.hadoop.fs.Path
import org.scalatest.funsuite.AnyFunSuite
Expand Down
Original file line number Diff line number Diff line change
@@ -1,10 +1,10 @@
package be.timvw.adobe.analytics.datafeed
package be.icteam.adobe.analytics.datafeed

import org.apache.spark.sql.functions.col
import org.apache.spark.sql.types.{StringType, StructField, StructType}
import org.apache.spark.sql.{AnalysisException, SparkSession}
// needed for 'the read clickstream extension method works' test
import be.timvw.adobe.analytics.datafeed.implicits._
import be.icteam.adobe.analytics.datafeed.implicits._
import org.scalatest.funsuite.AnyFunSuite

class DefaultSourceTest extends AnyFunSuite {
Expand Down
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
package be.timvw.adobe.analytics.datafeed
package be.icteam.adobe.analytics.datafeed

import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.Path
Expand Down
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
package be.timvw.adobe.analytics.datafeed
package be.icteam.adobe.analytics.datafeed

import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.Path
Expand Down
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
package be.timvw.adobe.analytics.datafeed
package be.icteam.adobe.analytics.datafeed

import org.apache.spark.sql.SparkSession

Expand Down

0 comments on commit eadf0c9

Please sign in to comment.