Answered — Assumed Answered

Spark .toDF() returns linkage error: duplicate class definition

Question asked by dzndrx on Feb 10, 2017
Latest reply on Feb 14, 2017 by dzndrx

Please help me — I'm trying to convert an RDD to a DataFrame, and I'm pretty sure I'm following the rules for doing it, but this error keeps my code from running. I don't know how to resolve the "duplicate class definition" problem.

Here is my code in Spark:

val auctionDF = auctionsRDD.toDF()

Here's the error:

java.lang.reflect.InvocationTargetException    at sun.reflect.GeneratedMethodAccessor2.invoke(Unknown Source)    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)    at java.lang.reflect.Method.invoke(Method.java:498)    at com.mapr.fs.ShimLoader.injectNativeLoader(ShimLoader.java:284)        at com.mapr.fs.ShimLoader.load(ShimLoader.java:223)        at com.mapr.fs.MapRFileSystem.<clinit>(MapRFileSystem.java:107)        at java.lang.Class.forName0(Native Method)        at java.lang.Class.forName(Class.java:348)        at org.apache.hadoop.conf.Configuration.getClassByNameOrNull(Configuration.java:2147)        at org.apache.hadoop.conf.Configuration.getClassByName(Configuration.java:2112)        at org.apache.hadoop.conf.Configuration.getClass(Configuration.java:2206)        at org.apache.hadoop.fs.FileSystem.getFileSystemClass(FileSystem.java:2674)        at org.apache.hadoop.fs.FileSystem.createFileSystem(FileSystem.java:2687)        at org.apache.hadoop.fs.FileSystem.access$200(FileSystem.java:94)        at org.apache.hadoop.fs.FileSystem$Cache.getInternal(FileSystem.java:2723)        at org.apache.hadoop.fs.FileSystem$Cache.getUnique(FileSystem.java:2711)        at org.apache.hadoop.fs.FileSystem.newInstance(FileSystem.java:454)        at org.apache.hadoop.fs.FileSystem.newInstance(FileSystem.java:462)        at org.apache.hadoop.fs.FileSystem.newInstance(FileSystem.java:444)        at org.apache.hadoop.hive.shims.Hadoop23Shims.getNonCachedFileSystem(Hadoop23Shims.java:944)        at org.apache.hadoop.hive.ql.exec.Utilities.createDirsWithPermission(Utilities.java:3687)        at org.apache.hadoop.hive.ql.session.SessionState.createRootHDFSDir(SessionState.java:600)        at org.apache.hadoop.hive.ql.session.SessionState.createSessionDirs(SessionState.java:554)        at org.apache.hadoop.hive.ql.session.SessionState.start(SessionState.java:508)        at 
org.apache.spark.sql.hive.client.HiveClientImpl.<init>(HiveClientImpl.scala:189)        at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)        at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)        at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)        at java.lang.reflect.Constructor.newInstance(Constructor.java:423)        at org.apache.spark.sql.hive.client.IsolatedClientLoader.createClient(IsolatedClientLoader.scala:258)        at org.apache.spark.sql.hive.HiveUtils$.newClientForMetadata(HiveUtils.scala:359)        at org.apache.spark.sql.hive.HiveUtils$.newClientForMetadata(HiveUtils.scala:263)        at org.apache.spark.sql.hive.HiveSharedState.metadataHive$lzycompute(HiveSharedState.scala:39)        at org.apache.spark.sql.hive.HiveSharedState.metadataHive(HiveSharedState.scala:38)        at org.apache.spark.sql.hive.HiveSharedState.externalCatalog$lzycompute(HiveSharedState.scala:46)        at org.apache.spark.sql.hive.HiveSharedState.externalCatalog(HiveSharedState.scala:45)        at org.apache.spark.sql.hive.HiveSessionState.catalog$lzycompute(HiveSessionState.scala:50)        at org.apache.spark.sql.hive.HiveSessionState.catalog(HiveSessionState.scala:48)        at org.apache.spark.sql.hive.HiveSessionState$$anon$1.<init>(HiveSessionState.scala:63)        at org.apache.spark.sql.hive.HiveSessionState.analyzer$lzycompute(HiveSessionState.scala:63)        at org.apache.spark.sql.hive.HiveSessionState.analyzer(HiveSessionState.scala:62)        at org.apache.spark.sql.execution.QueryExecution.assertAnalyzed(QueryExecution.scala:49)        at org.apache.spark.sql.Dataset.<init>(Dataset.scala:161)        at org.apache.spark.sql.Dataset.<init>(Dataset.scala:167)        at org.apache.spark.sql.Dataset$.apply(Dataset.scala:59)        at org.apache.spark.sql.SparkSession.createDataset(SparkSession.scala:441)        at 
org.apache.spark.sql.SQLContext.createDataset(SQLContext.scala:395)        at org.apache.spark.sql.SQLImplicits.rddToDatasetHolder(SQLImplicits.scala:163)        at $line30.$read$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw.<init>(<console>:35)        at $line30.$read$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw.<init>(<console>:40)        at $line30.$read$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw.<init>(<console>:42)        at $line30.$read$$iw$$iw$$iw$$iw$$iw$$iw$$iw.<init>(<console>:44)        at $line30.$read$$iw$$iw$$iw$$iw$$iw$$iw.<init>(<console>:46)        at $line30.$read$$iw$$iw$$iw$$iw$$iw.<init>(<console>:48)        at $line30.$read$$iw$$iw$$iw$$iw.<init>(<console>:50)        at $line30.$read$$iw$$iw$$iw.<init>(<console>:52)        at $line30.$read$$iw$$iw.<init>(<console>:54)        at $line30.$read$$iw.<init>(<console>:56)        at $line30.$read.<init>(<console>:58)        at $line30.$read$.<init>(<console>:62)        at $line30.$read$.<clinit>(<console>)        at $line30.$eval$.$print$lzycompute(<console>:7)        at $line30.$eval$.$print(<console>:6)        at $line30.$eval.$print(<console>)        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)        at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)        at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)        at java.lang.reflect.Method.invoke(Method.java:498)        at scala.tools.nsc.interpreter.IMain$ReadEvalPrint.call(IMain.scala:786)        at scala.tools.nsc.interpreter.IMain$Request.loadAndRun(IMain.scala:1047)        at scala.tools.nsc.interpreter.IMain$WrappedRequest$$anonfun$loadAndRunReq$1.apply(IMain.scala:638)        at scala.tools.nsc.interpreter.IMain$WrappedRequest$$anonfun$loadAndRunReq$1.apply(IMain.scala:637)        at scala.reflect.internal.util.ScalaClassLoader$class.asContext(ScalaClassLoader.scala:31)        at scala.reflect.internal.util.AbstractFileClassLoader.asContext(AbstractFileClassLoader.scala:19)     
   at scala.tools.nsc.interpreter.IMain$WrappedRequest.loadAndRunReq(IMain.scala:637)        at scala.tools.nsc.interpreter.IMain.interpret(IMain.scala:569)        at scala.tools.nsc.interpreter.IMain.interpret(IMain.scala:565)        at scala.tools.nsc.interpreter.ILoop.interpretStartingWith(ILoop.scala:807)        at scala.tools.nsc.interpreter.ILoop.command(ILoop.scala:681)        at scala.tools.nsc.interpreter.ILoop.processLine(ILoop.scala:395)        at scala.tools.nsc.interpreter.ILoop.loop(ILoop.scala:415)        at scala.tools.nsc.interpreter.ILoop$$anonfun$process$1.apply$mcZ$sp(ILoop.scala:923)        at scala.tools.nsc.interpreter.ILoop$$anonfun$process$1.apply(ILoop.scala:909)        at scala.tools.nsc.interpreter.ILoop$$anonfun$process$1.apply(ILoop.scala:909)        at scala.reflect.internal.util.ScalaClassLoader$.savingContextLoader(ScalaClassLoader.scala:97)        at scala.tools.nsc.interpreter.ILoop.process(ILoop.scala:909)        at org.apache.spark.repl.Main$.doMain(Main.scala:68)        at org.apache.spark.repl.Main$.main(Main.scala:51)        at org.apache.spark.repl.Main.main(Main.scala)        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)        at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)        at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)        at java.lang.reflect.Method.invoke(Method.java:498)        at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:736)        at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:185)        at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:210)        at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:124)        at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)Caused by: java.lang.LinkageError: loader (instance of  org/apache/spark/sql/hive/client/IsolatedClientLoader$$anon$1): attempted  duplicate 
class definition for name: "com/mapr/fs/shim/LibraryLoader"        at java.lang.ClassLoader.defineClass1(Native Method)        at java.lang.ClassLoader.defineClass(ClassLoader.java:763)        ... 98 more java.lang.RuntimeException: Failure loading MapRClient.        at com.mapr.fs.ShimLoader.injectNativeLoader(ShimLoader.java:305)        at com.mapr.fs.ShimLoader.load(ShimLoader.java:223)        at com.mapr.fs.MapRFileSystem.<clinit>(MapRFileSystem.java:107)        at java.lang.Class.forName0(Native Method)        at java.lang.Class.forName(Class.java:348)        at org.apache.hadoop.conf.Configuration.getClassByNameOrNull(Configuration.java:2147)        at org.apache.hadoop.conf.Configuration.getClassByName(Configuration.java:2112)        at org.apache.hadoop.conf.Configuration.getClass(Configuration.java:2206)        at org.apache.hadoop.fs.FileSystem.getFileSystemClass(FileSystem.java:2674)        at org.apache.hadoop.fs.FileSystem.createFileSystem(FileSystem.java:2687)        at org.apache.hadoop.fs.FileSystem.access$200(FileSystem.java:94)        at org.apache.hadoop.fs.FileSystem$Cache.getInternal(FileSystem.java:2723)        at org.apache.hadoop.fs.FileSystem$Cache.getUnique(FileSystem.java:2711)        at org.apache.hadoop.fs.FileSystem.newInstance(FileSystem.java:454)        at org.apache.hadoop.fs.FileSystem.newInstance(FileSystem.java:462)        at org.apache.hadoop.fs.FileSystem.newInstance(FileSystem.java:444)        at org.apache.hadoop.hive.shims.Hadoop23Shims.getNonCachedFileSystem(Hadoop23Shims.java:944)        at org.apache.hadoop.hive.ql.exec.Utilities.createDirsWithPermission(Utilities.java:3687)        at org.apache.hadoop.hive.ql.session.SessionState.createRootHDFSDir(SessionState.java:600)        at org.apache.hadoop.hive.ql.session.SessionState.createSessionDirs(SessionState.java:554)        at org.apache.hadoop.hive.ql.session.SessionState.start(SessionState.java:508)        at 
org.apache.spark.sql.hive.client.HiveClientImpl.<init>(HiveClientImpl.scala:189)        at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)        at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)        at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)        at java.lang.reflect.Constructor.newInstance(Constructor.java:423)        at org.apache.spark.sql.hive.client.IsolatedClientLoader.createClient(IsolatedClientLoader.scala:258)        at org.apache.spark.sql.hive.HiveUtils$.newClientForMetadata(HiveUtils.scala:359)        at org.apache.spark.sql.hive.HiveUtils$.newClientForMetadata(HiveUtils.scala:263)        at org.apache.spark.sql.hive.HiveSharedState.metadataHive$lzycompute(HiveSharedState.scala:39)        at org.apache.spark.sql.hive.HiveSharedState.metadataHive(HiveSharedState.scala:38)        at org.apache.spark.sql.hive.HiveSharedState.externalCatalog$lzycompute(HiveSharedState.scala:46)        at org.apache.spark.sql.hive.HiveSharedState.externalCatalog(HiveSharedState.scala:45)        at org.apache.spark.sql.hive.HiveSessionState.catalog$lzycompute(HiveSessionState.scala:50)        at org.apache.spark.sql.hive.HiveSessionState.catalog(HiveSessionState.scala:48)        at org.apache.spark.sql.hive.HiveSessionState$$anon$1.<init>(HiveSessionState.scala:63)        at org.apache.spark.sql.hive.HiveSessionState.analyzer$lzycompute(HiveSessionState.scala:63)        at org.apache.spark.sql.hive.HiveSessionState.analyzer(HiveSessionState.scala:62)        at org.apache.spark.sql.execution.QueryExecution.assertAnalyzed(QueryExecution.scala:49)        at org.apache.spark.sql.Dataset.<init>(Dataset.scala:161)        at org.apache.spark.sql.Dataset.<init>(Dataset.scala:167)        at org.apache.spark.sql.Dataset$.apply(Dataset.scala:59)        at org.apache.spark.sql.SparkSession.createDataset(SparkSession.scala:441)        at 
org.apache.spark.sql.SQLContext.createDataset(SQLContext.scala:395)        at org.apache.spark.sql.SQLImplicits.rddToDatasetHolder(SQLImplicits.scala:163)        at $line30.$read$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw.<init>(<console>:35)        at $line30.$read$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw.<init>(<console>:40)        at $line30.$read$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw.<init>(<console>:42)        at $line30.$read$$iw$$iw$$iw$$iw$$iw$$iw$$iw.<init>(<console>:44)        at $line30.$read$$iw$$iw$$iw$$iw$$iw$$iw.<init>(<console>:46)        at $line30.$read$$iw$$iw$$iw$$iw$$iw.<init>(<console>:48)        at $line30.$read$$iw$$iw$$iw$$iw.<init>(<console>:50)        at $line30.$read$$iw$$iw$$iw.<init>(<console>:52)        at $line30.$read$$iw$$iw.<init>(<console>:54)        at $line30.$read$$iw.<init>(<console>:56)        at $line30.$read.<init>(<console>:58)        at $line30.$read$.<init>(<console>:62)        at $line30.$read$.<clinit>(<console>)        at $line30.$eval$.$print$lzycompute(<console>:7)        at $line30.$eval$.$print(<console>:6)        at $line30.$eval.$print(<console>)        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)        at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)        at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)        at java.lang.reflect.Method.invoke(Method.java:498)        at scala.tools.nsc.interpreter.IMain$ReadEvalPrint.call(IMain.scala:786)        at scala.tools.nsc.interpreter.IMain$Request.loadAndRun(IMain.scala:1047)        at scala.tools.nsc.interpreter.IMain$WrappedRequest$$anonfun$loadAndRunReq$1.apply(IMain.scala:638)        at scala.tools.nsc.interpreter.IMain$WrappedRequest$$anonfun$loadAndRunReq$1.apply(IMain.scala:637)        at scala.reflect.internal.util.ScalaClassLoader$class.asContext(ScalaClassLoader.scala:31)        at scala.reflect.internal.util.AbstractFileClassLoader.asContext(AbstractFileClassLoader.scala:19)     
   at scala.tools.nsc.interpreter.IMain$WrappedRequest.loadAndRunReq(IMain.scala:637)        at scala.tools.nsc.interpreter.IMain.interpret(IMain.scala:569)        at scala.tools.nsc.interpreter.IMain.interpret(IMain.scala:565)        at scala.tools.nsc.interpreter.ILoop.interpretStartingWith(ILoop.scala:807)        at scala.tools.nsc.interpreter.ILoop.command(ILoop.scala:681)        at scala.tools.nsc.interpreter.ILoop.processLine(ILoop.scala:395)        at scala.tools.nsc.interpreter.ILoop.loop(ILoop.scala:415)        at scala.tools.nsc.interpreter.ILoop$$anonfun$process$1.apply$mcZ$sp(ILoop.scala:923)        at scala.tools.nsc.interpreter.ILoop$$anonfun$process$1.apply(ILoop.scala:909)        at scala.tools.nsc.interpreter.ILoop$$anonfun$process$1.apply(ILoop.scala:909)        at scala.reflect.internal.util.ScalaClassLoader$.savingContextLoader(ScalaClassLoader.scala:97)        at scala.tools.nsc.interpreter.ILoop.process(ILoop.scala:909)        at org.apache.spark.repl.Main$.doMain(Main.scala:68)        at org.apache.spark.repl.Main$.main(Main.scala:51)        at org.apache.spark.repl.Main.main(Main.scala)        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)        at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)        at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)        at java.lang.reflect.Method.invoke(Method.java:498)        at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:736)        at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:185)        at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:210)        at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:124)        at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)Caused by: java.lang.reflect.InvocationTargetException        at sun.reflect.GeneratedMethodAccessor2.invoke(Unknown Source)        at 
sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)        at java.lang.reflect.Method.invoke(Method.java:498)        at com.mapr.fs.ShimLoader.injectNativeLoader(ShimLoader.java:284)        ... 94 moreCaused by: java.lang.LinkageError: loader (instance of  org/apache/spark/sql/hive/client/IsolatedClientLoader$$anon$1): attempted  duplicate class definition for name: "com/mapr/fs/shim/LibraryLoader"        at java.lang.ClassLoader.defineClass1(Native Method)        at java.lang.ClassLoader.defineClass(ClassLoader.java:763)        ... 98 more java.lang.RuntimeException: Failure loading MapRClient.  at com.mapr.fs.ShimLoader.injectNativeLoader(ShimLoader.java:305)  at com.mapr.fs.ShimLoader.load(ShimLoader.java:223)  at com.mapr.fs.MapRFileSystem.<clinit>(MapRFileSystem.java:107)  at java.lang.Class.forName0(Native Method)  at java.lang.Class.forName(Class.java:348)  at org.apache.hadoop.conf.Configuration.getClassByNameOrNull(Configuration.java:2147)  at org.apache.hadoop.conf.Configuration.getClassByName(Configuration.java:2112)  at org.apache.hadoop.conf.Configuration.getClass(Configuration.java:2206)  at org.apache.hadoop.fs.FileSystem.getFileSystemClass(FileSystem.java:2674)  at org.apache.hadoop.fs.FileSystem.createFileSystem(FileSystem.java:2687)  at org.apache.hadoop.fs.FileSystem.access$200(FileSystem.java:94)  at org.apache.hadoop.fs.FileSystem$Cache.getInternal(FileSystem.java:2723)  at org.apache.hadoop.fs.FileSystem$Cache.getUnique(FileSystem.java:2711)  at org.apache.hadoop.fs.FileSystem.newInstance(FileSystem.java:454)  at org.apache.hadoop.fs.FileSystem.newInstance(FileSystem.java:462)  at org.apache.hadoop.fs.FileSystem.newInstance(FileSystem.java:444)  at org.apache.hadoop.hive.shims.Hadoop23Shims.getNonCachedFileSystem(Hadoop23Shims.java:944)  at org.apache.hadoop.hive.ql.exec.Utilities.createDirsWithPermission(Utilities.java:3687)  at 
org.apache.hadoop.hive.ql.session.SessionState.createRootHDFSDir(SessionState.java:600)  at org.apache.hadoop.hive.ql.session.SessionState.createSessionDirs(SessionState.java:554)  at org.apache.hadoop.hive.ql.session.SessionState.start(SessionState.java:508)  at org.apache.spark.sql.hive.client.HiveClientImpl.<init>(HiveClientImpl.scala:189)  at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)  at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)  at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)  at java.lang.reflect.Constructor.newInstance(Constructor.java:423)  at org.apache.spark.sql.hive.client.IsolatedClientLoader.createClient(IsolatedClientLoader.scala:258)  at org.apache.spark.sql.hive.HiveUtils$.newClientForMetadata(HiveUtils.scala:359)  at org.apache.spark.sql.hive.HiveUtils$.newClientForMetadata(HiveUtils.scala:263)  at org.apache.spark.sql.hive.HiveSharedState.metadataHive$lzycompute(HiveSharedState.scala:39)  at org.apache.spark.sql.hive.HiveSharedState.metadataHive(HiveSharedState.scala:38)  at org.apache.spark.sql.hive.HiveSharedState.externalCatalog$lzycompute(HiveSharedState.scala:46)  at org.apache.spark.sql.hive.HiveSharedState.externalCatalog(HiveSharedState.scala:45)  at org.apache.spark.sql.hive.HiveSessionState.catalog$lzycompute(HiveSessionState.scala:50)  at org.apache.spark.sql.hive.HiveSessionState.catalog(HiveSessionState.scala:48)  at org.apache.spark.sql.hive.HiveSessionState$$anon$1.<init>(HiveSessionState.scala:63)  at org.apache.spark.sql.hive.HiveSessionState.analyzer$lzycompute(HiveSessionState.scala:63)  at org.apache.spark.sql.hive.HiveSessionState.analyzer(HiveSessionState.scala:62)  at org.apache.spark.sql.execution.QueryExecution.assertAnalyzed(QueryExecution.scala:49)  at org.apache.spark.sql.Dataset.<init>(Dataset.scala:161)  at org.apache.spark.sql.Dataset.<init>(Dataset.scala:167)  at 
org.apache.spark.sql.Dataset$.apply(Dataset.scala:59)  at org.apache.spark.sql.SparkSession.createDataset(SparkSession.scala:441)  at org.apache.spark.sql.SQLContext.createDataset(SQLContext.scala:395)  at org.apache.spark.sql.SQLImplicits.rddToDatasetHolder(SQLImplicits.scala:163)  ... 50 elidedCaused by: java.lang.reflect.InvocationTargetException: java.lang.LinkageError: loader (instance of  org/apache/spark/sql/hive/client/IsolatedClientLoader$$anon$1): attempted  duplicate class definition for name: "com/mapr/fs/shim/LibraryLoader"  at sun.reflect.GeneratedMethodAccessor2.invoke(Unknown Source)  at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)  at java.lang.reflect.Method.invoke(Method.java:498)  at com.mapr.fs.ShimLoader.injectNativeLoader(ShimLoader.java:284)  ... 94 moreCaused by: java.lang.LinkageError: loader (instance of  org/apache/spark/sql/hive/client/IsolatedClientLoader$$anon$1): attempted  duplicate class definition for name: "com/mapr/fs/shim/LibraryLoader"  at java.lang.ClassLoader.defineClass1(Native Method)  at java.lang.ClassLoader.defineClass(ClassLoader.java:763)  ... 98 more

Outcomes