It seems that while loading com.databricks.spark.csv, a ServiceConfigurationError is thrown. I have tried spark-csv_2.{10,11}-1.{3,4,5}.0.jar. The job runs fine on my local machine, but fails on CDAP Sandbox 4.3.3-1 (Amazon AWS):
java.util.ServiceConfigurationError: org.apache.spark.sql.sources.DataSourceRegister: Provider com.databricks.spark.csv.DefaultSource15 not found
at java.util.ServiceLoader.fail(ServiceLoader.java:239) ~[na:1.8.0_151]
at java.util.ServiceLoader.access$300(ServiceLoader.java:185) ~[na:1.8.0_151]
at java.util.ServiceLoader$LazyIterator.nextService(ServiceLoader.java:372) ~[na:1.8.0_151]
at java.util.ServiceLoader$LazyIterator.next(ServiceLoader.java:404) ~[na:1.8.0_151]
at java.util.ServiceLoader$1.next(ServiceLoader.java:480) ~[na:1.8.0_151]
at scala.collection.convert.Wrappers$JIteratorWrapper.next(Wrappers.scala:42) ~[na:na]
at scala.collection.Iterator$class.foreach(Iterator.scala:727) ~[na:na]
at scala.collection.AbstractIterator.foreach(Iterator.scala:1157) ~[na:na]
at scala.collection.IterableLike$class.foreach(IterableLike.scala:72) ~[na:na]
at scala.collection.AbstractIterable.foreach(Iterable.scala:54) ~[na:na]
at scala.collection.TraversableLike$class.filter(TraversableLike.scala:263) ~[na:na]
at scala.collection.AbstractTraversable.filter(Traversable.scala:105) ~[na:na]
at org.apache.spark.sql.execution.datasources.ResolvedDataSource$.lookupDataSource(ResolvedDataSource.scala:59) ~[na:na]
at org.apache.spark.sql.execution.datasources.ResolvedDataSource$.apply(ResolvedDataSource.scala:102) ~[na:na]
at org.apache.spark.sql.DataFrameReader.load(DataFrameReader.scala:119) ~[na:na]
at org.apache.spark.sql.DataFrameReader.load(DataFrameReader.scala:109) ~[na:na]
at com.corridor.transform.ProcessLC$.main(ProcessLC.scala:41) ~[na:na]
at com.corridor.transform.ProcessLC.main(ProcessLC.scala) ~[na:na]
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[na:1.8.0_151]
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) ~[na:1.8.0_151]
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[na:1.8.0_151]
at java.lang.reflect.Method.invoke(Method.java:498) ~[na:1.8.0_151]
at co.cask.cdap.datapipeline.JavaSparkMainWrapper$1.call(JavaSparkMainWrapper.java:73) ~[na:na]
at co.cask.cdap.datapipeline.JavaSparkMainWrapper$1.call(JavaSparkMainWrapper.java:70) ~[na:na]
at co.cask.cdap.etl.common.plugin.Caller$1.call(Caller.java:30) ~[na:na]
at co.cask.cdap.etl.common.plugin.StageLoggingCaller.call(StageLoggingCaller.java:40) ~[na:na]
at co.cask.cdap.datapipeline.JavaSparkMainWrapper.run(JavaSparkMainWrapper.java:70) ~[na:na]
at co.cask.cdap.app.runtime.spark.SparkMainWrapper$.main(SparkMainWrapper.scala:82) ~[na:na]
at co.cask.cdap.app.runtime.spark.SparkMainWrapper.main(SparkMainWrapper.scala) ~[na:na]
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[na:1.8.0_151]
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) ~[na:1.8.0_151]
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[na:1.8.0_151]
at java.lang.reflect.Method.invoke(Method.java:498) ~[na:1.8.0_151]
at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:731) ~[na:na]
at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:181) ~[na:na]
at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:206) ~[na:na]
at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:121) ~[na:na]
at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) ~[na:na]
at co.cask.cdap.app.runtime.spark.submit.AbstractSparkSubmitter.submit(AbstractSparkSubmitter.java:171) ~[na:na]
at co.cask.cdap.app.runtime.spark.submit.AbstractSparkSubmitter.access$000(AbstractSparkSubmitter.java:53) ~[na:na]
at co.cask.cdap.app.runtime.spark.submit.AbstractSparkSubmitter$5.run(AbstractSparkSubmitter.java:110) ~[na:na]
at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511) ~[na:1.8.0_151]
at java.util.concurrent.FutureTask.run(FutureTask.java:266) ~[na:1.8.0_151]
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149) ~[na:1.8.0_151]
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) ~[na:1.8.0_151]
at java.lang.Thread.run(Thread.java:748) [na:1.8.0_151]
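
The source of ProcessLC.scala is not included above, so the following is only a minimal sketch of the kind of call that reaches `ResolvedDataSource.lookupDataSource` at line 41 of the trace; the input path and reader options are placeholders, not the real ones:

```scala
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.SQLContext

object ProcessLC {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setAppName("ProcessLC")
    val sc = new SparkContext(conf)
    val sqlContext = new SQLContext(sc)

    // DataFrameReader.load() triggers the ServiceLoader scan for
    // DataSourceRegister implementations seen in the stack trace.
    val df = sqlContext.read
      .format("com.databricks.spark.csv") // spark-csv external package (Spark 1.x)
      .option("header", "true")           // placeholder option
      .load("/path/to/input.csv")         // placeholder path

    df.show()
  }
}
```

For context on why the format string is not the trigger: `lookupDataSource` first asks `java.util.ServiceLoader` to instantiate every `DataSourceRegister` provider registered in any `META-INF/services` file on the classpath, and only then filters by name. So if a spark-csv jar's services file lists `com.databricks.spark.csv.DefaultSource15` but that class is not visible to the classloader doing the scan, the lookup fails with exactly this error regardless of which format the job actually requests.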