2016-03-22 24 views
3

Spark SQL / HiveQL kullanan sürücü uygulamasını YARN kümesine gönderdiğimde bir java.lang.LinkageError: ClassCastException hatası alıyorum (YARN üzerinde):

package com.baidu.spark.forhivetest 

import org.apache.spark.sql._ 
import org.apache.spark.sql.types._ 
import org.apache.spark.sql.hive._ 
import org.apache.spark.SparkContext 
/**
 * Minimal reproduction app: creates a Hive table through `HiveContext`
 * and prints the table's schema. Submitted to a YARN cluster via
 * spark-submit (Spark 1.6, per the stack trace below).
 *
 * Fixes relative to the posted snippet:
 *  - the object's closing brace was missing (only `main` was closed);
 *  - the SparkContext is now stopped so YARN resources are released.
 */
object ForTest {
  def main(args: Array[String]): Unit = {
    // Configuration (master, app name) is expected to come from spark-submit.
    val sc = new SparkContext()
    // NOTE(review): `sqlc` is never used below — kept only for parity with
    // the original post; it can be removed.
    val sqlc = new SQLContext(sc)
    val hivec = new HiveContext(sc)
    // Idempotent DDL: safe to re-run on an existing metastore.
    hivec.sql("CREATE TABLE IF NOT EXISTS newtest (time TIMESTAMP,word STRING,current_city_name STRING,content_src_name STRING,content_name STRING)")
    val schema = hivec.table("newtest").schema
    println(schema)
    // Release the YARN application's resources cleanly.
    sc.stop()
  }
}

Hive yapılandırma dosyasında hive.metastore.uris ve hive.metastore.warehouse.dir değerlerini ayarladım.

spark-submit komutuna şu jar dosyalarını ekledim:

  • datanucleus-api-jdo-3.2.6.jar
  • datanucleus-core-3.2.10.jar
  • datanucleus-rdbms-3.2.9.jar

Ben hala bu hatayı alıyorum, mysql-connector-java-5.1.38-bin.jar ve spark-1.6.0-bin-hadoop2.6/lib/guava-14.0.1.jar eklendi bile!

Ama aynı kodu spark-shell üzerinde çalıştırdığımda başarıyla çalışıyor!

Birisi bana yardım edebilir! çok teşekkürler!

java.lang.LinkageError: ClassCastException: attempting to cast jar:file:/mnt/hadoop/yarn/local/filecache/18/spark-assembly-1.6.0-hadoop2.6.0.jar!/javax/ws/rs/ext/RuntimeDelegate.class to jar:file:/mnt/hadoop/yarn/local/filecache/18/spark-assembly-1.6.0-hadoop2.6.0.jar!/javax/ws/rs/ext/RuntimeDelegate.class 
    at javax.ws.rs.ext.RuntimeDelegate.findDelegate(RuntimeDelegate.java:116) 
    at javax.ws.rs.ext.RuntimeDelegate.getInstance(RuntimeDelegate.java:91) 
    at javax.ws.rs.core.MediaType.<clinit>(MediaType.java:44) 
    at com.sun.jersey.core.header.MediaTypes.<clinit>(MediaTypes.java:64) 
    at com.sun.jersey.core.spi.factory.MessageBodyFactory.initReaders(MessageBodyFactory.java:182) 
    at com.sun.jersey.core.spi.factory.MessageBodyFactory.initReaders(MessageBodyFactory.java:175) 
    at com.sun.jersey.core.spi.factory.MessageBodyFactory.init(MessageBodyFactory.java:162) 
    at com.sun.jersey.api.client.Client.init(Client.java:342) 
    at com.sun.jersey.api.client.Client.access$000(Client.java:118) 
    at com.sun.jersey.api.client.Client$1.f(Client.java:191) 
    at com.sun.jersey.api.client.Client$1.f(Client.java:187) 
    at com.sun.jersey.spi.inject.Errors.processWithErrors(Errors.java:193) 
    at com.sun.jersey.api.client.Client.<init>(Client.java:187) 
    at com.sun.jersey.api.client.Client.<init>(Client.java:170) 
    at org.apache.hadoop.yarn.client.api.impl.TimelineClientImpl.serviceInit(TimelineClientImpl.java:268) 
    at org.apache.hadoop.service.AbstractService.init(AbstractService.java:163) 
    at org.apache.hadoop.hive.ql.hooks.ATSHook.<init>(ATSHook.java:67) 
    at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method) 
    at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:57) 
    at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45) 
    at java.lang.reflect.Constructor.newInstance(Constructor.java:526) 
    at java.lang.Class.newInstance(Class.java:374) 
    at org.apache.hadoop.hive.ql.hooks.HookUtils.getHooks(HookUtils.java:60) 
    at org.apache.hadoop.hive.ql.Driver.getHooks(Driver.java:1309) 
    at org.apache.hadoop.hive.ql.Driver.getHooks(Driver.java:1293) 
    at org.apache.hadoop.hive.ql.Driver.execute(Driver.java:1347) 
    at org.apache.hadoop.hive.ql.Driver.runInternal(Driver.java:1195) 
    at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1059) 
    at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1049) 
    at org.apache.spark.sql.hive.client.ClientWrapper$$anonfun$runHive$1.apply(ClientWrapper.scala:484) 
    at org.apache.spark.sql.hive.client.ClientWrapper$$anonfun$runHive$1.apply(ClientWrapper.scala:473) 
    at org.apache.spark.sql.hive.client.ClientWrapper$$anonfun$withHiveState$1.apply(ClientWrapper.scala:279) 
    at org.apache.spark.sql.hive.client.ClientWrapper.liftedTree1$1(ClientWrapper.scala:226) 
    at org.apache.spark.sql.hive.client.ClientWrapper.retryLocked(ClientWrapper.scala:225) 
    at org.apache.spark.sql.hive.client.ClientWrapper.withHiveState(ClientWrapper.scala:268) 
    at org.apache.spark.sql.hive.client.ClientWrapper.runHive(ClientWrapper.scala:473) 
    at org.apache.spark.sql.hive.client.ClientWrapper.runSqlHive(ClientWrapper.scala:463) 
    at org.apache.spark.sql.hive.HiveContext.runSqlHive(HiveContext.scala:605) 
    at org.apache.spark.sql.hive.execution.HiveNativeCommand.run(HiveNativeCommand.scala:33) 
    at org.apache.spark.sql.execution.ExecutedCommand.sideEffectResult$lzycompute(commands.scala:58) 
    at org.apache.spark.sql.execution.ExecutedCommand.sideEffectResult(commands.scala:56) 
    at org.apache.spark.sql.execution.ExecutedCommand.doExecute(commands.scala:70) 
    at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$5.apply(SparkPlan.scala:132) 
    at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$5.apply(SparkPlan.scala:130) 
    at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:150) 
    at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:130) 
    at org.apache.spark.sql.execution.QueryExecution.toRdd$lzycompute(QueryExecution.scala:55) 
    at org.apache.spark.sql.execution.QueryExecution.toRdd(QueryExecution.scala:55) 
    at org.apache.spark.sql.DataFrame.<init>(DataFrame.scala:145) 
    at org.apache.spark.sql.DataFrame.<init>(DataFrame.scala:130) 
    at org.apache.spark.sql.DataFrame$.apply(DataFrame.scala:52) 
    at org.apache.spark.sql.SQLContext.sql(SQLContext.scala:817) 
    at com.baidu.spark.forhivetest.ForTest$.main(ForTest.scala:12) 
    at com.baidu.spark.forhivetest.ForTest.main(ForTest.scala) 
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) 
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57) 
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) 
    at java.lang.reflect.Method.invoke(Method.java:606) 
    at org.apache.spark.deploy.yarn.ApplicationMaster$$anon$2.run(ApplicationMaster.scala:542) 
16/03/22 17:04:32 INFO yarn.ApplicationMaster: Final app status: FAILED, exitCode: 15, (reason: User class threw exception: java.lang.LinkageError: ClassCastException: attempting to cast jar:file:/mnt/hadoop/yarn/local/filecache/18/spark-assembly-1.6.0-hadoop2.6.0.jar!/javax/ws/rs/ext/RuntimeDelegate.class to jar:file:/mnt/hadoop/yarn/local/filecache/18/spark-assembly-1.6.0-hadoop2.6.0.jar!/javax/ws/rs/ext/RuntimeDelegate.class) 
+1

Aynı şey için herhangi bir çözünürlük var mı? – hbabbar

+0

bu konuda herhangi bir haber var mı? –

+0

Başvuru: https://community.smartbear.com/t5/Ready-API-and-SoapUI-PlugIn/Got-Error-of-ClassCastException-any-special-annotations-for/m-p/103803#M97 – shahjapan

cevap

0

Bunun sınıf yolu (classpath) ile ilgisi var: hata mesajı, aynı sınıfın (javax.ws.rs.ext.RuntimeDelegate) classpath üzerinde iki kez yüklendiğini gösteriyor.

Fat (uber) jar oluşturmamayı deneyin; Spark'ın zaten sağladığı bağımlılıkları uygulama jar'ınıza paketlemeyin.