
When Spark is running under sbt test, I get this exception (Apache Spark: wrong akka-remote Netty version):

18:58:49.049 [sparkDriver-akka.actor.default-dispatcher-2] ERROR akka.actor.ActorSystemImpl - Uncaught fatal error from thread [sparkDriver-akka.remote.default-remote-dispatcher-5] shutting down ActorSystem [sparkDriver] 
java.lang.VerifyError: (class: org/jboss/netty/channel/socket/nio/NioWorkerPool, method: createWorker signature: (Ljava/util/concurrent/Executor;)Lorg/jboss/netty/channel/socket/nio/AbstractNioWorker;) Wrong return type in function 
at akka.remote.transport.netty.NettyTransport.<init>(NettyTransport.scala:283) ~[akka-remote_2.11-2.3.4-spark.jar:na] 
at akka.remote.transport.netty.NettyTransport.<init>(NettyTransport.scala:240) ~[akka-remote_2.11-2.3.4-spark.jar:na] 
at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method) ~[na:1.8.0_45] 
at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62) ~[na:1.8.0_45] 
at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45) ~[na:1.8.0_45] 
at java.lang.reflect.Constructor.newInstance(Constructor.java:422) ~[na:1.8.0_45] 
at akka.actor.ReflectiveDynamicAccess$$anonfun$createInstanceFor$2.apply(DynamicAccess.scala:78) ~[akka-actor_2.11-2.3.4-spark.jar:na] 
at scala.util.Try$.apply(Try.scala:192) ~[scala-library-2.11.7.jar:0.13.8] 
at akka.actor.ReflectiveDynamicAccess.createInstanceFor(DynamicAccess.scala:73) ~[akka-actor_2.11-2.3.4-spark.jar:na] 
at akka.actor.ReflectiveDynamicAccess$$anonfun$createInstanceFor$3.apply(DynamicAccess.scala:84) ~[akka-actor_2.11-2.3.4-spark.jar:na] 
at akka.actor.ReflectiveDynamicAccess$$anonfun$createInstanceFor$3.apply(DynamicAccess.scala:84) ~[akka-actor_2.11-2.3.4-spark.jar:na] 
at scala.util.Success.flatMap(Try.scala:231) ~[scala-library-2.11.7.jar:0.13.8] 
at akka.actor.ReflectiveDynamicAccess.createInstanceFor(DynamicAccess.scala:84) ~[akka-actor_2.11-2.3.4-spark.jar:na] 
at akka.remote.EndpointManager$$anonfun$9.apply(Remoting.scala:692) ~[akka-remote_2.11-2.3.4-spark.jar:na] 
at akka.remote.EndpointManager$$anonfun$9.apply(Remoting.scala:684) ~[akka-remote_2.11-2.3.4-spark.jar:na] 
at scala.collection.TraversableLike$WithFilter$$anonfun$map$2.apply(TraversableLike.scala:728) ~[scala-library-2.11.7.jar:0.13.8] 
at scala.collection.Iterator$class.foreach(Iterator.scala:742) ~[scala-library-2.11.7.jar:0.13.8] 
at scala.collection.AbstractIterator.foreach(Iterator.scala:1194) ~[scala-library-2.11.7.jar:0.13.8] 
at scala.collection.IterableLike$class.foreach(IterableLike.scala:72) ~[scala-library-2.11.7.jar:0.13.8] 
at scala.collection.AbstractIterable.foreach(Iterable.scala:54) ~[scala-library-2.11.7.jar:0.13.8] 
at scala.collection.TraversableLike$WithFilter.map(TraversableLike.scala:727) ~[scala-library-2.11.7.jar:0.13.8] 
at akka.remote.EndpointManager.akka$remote$EndpointManager$$listens(Remoting.scala:684) ~[akka-remote_2.11-2.3.4-spark.jar:na] 
at akka.remote.EndpointManager$$anonfun$receive$2.applyOrElse(Remoting.scala:492) ~[akka-remote_2.11-2.3.4-spark.jar:na] 
at akka.actor.Actor$class.aroundReceive(Actor.scala:465) ~[akka-actor_2.11-2.3.4-spark.jar:na] 
at akka.remote.EndpointManager.aroundReceive(Remoting.scala:395) ~[akka-remote_2.11-2.3.4-spark.jar:na] 
at akka.actor.ActorCell.receiveMessage(ActorCell.scala:516) [akka-actor_2.11-2.3.4-spark.jar:na] 
at akka.actor.ActorCell.invoke(ActorCell.scala:487) [akka-actor_2.11-2.3.4-spark.jar:na] 
at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:238) [akka-actor_2.11-2.3.4-spark.jar:na] 
at akka.dispatch.Mailbox.run(Mailbox.scala:220) [akka-actor_2.11-2.3.4-spark.jar:na] 
at akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:393) [akka-actor_2.11-2.3.4-spark.jar:na] 
... 

My first thought was that one of my sub-libraries was importing a wrong Netty version.

After looking at my dependency graph, I found four different versions of Netty: 3.6.6, 3.8.0, 3.9.3, and 4.0.23.

These versions are mostly imported by Spark itself: 4.0.23 directly by Spark, and 3.8.0 by its transitive dependency akka-remote.
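
For reference, here is how such a graph can be inspected; a minimal sketch, assuming the sbt-dependency-graph plugin (the plugin version below is illustrative):

// project/plugins.sbt
addSbtPlugin("net.virtual-void" % "sbt-dependency-graph" % "0.8.2")

// Then, from the sbt shell:
//   dependencyTree                            -- prints the full dependency tree
//   whatDependsOn io.netty netty 3.8.0.Final  -- shows which deps pull in that exact version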

I tried to exclude the Netty dependency from all of my sub-dependencies with excludeAll(ExclusionRule(organization = "io.netty")) and to add each Netty version one by one, but it did not solve the problem. I also tried excluding all the com.typesafe.akka deps and using akka-remote 2.4.0; I got the same problem.

The Netty version that akka-remote needs is 3.8.0, but even forcing that does not work. I also forced my project to use the Spark-specific Akka build, akka-remote 2.3.4-spark; that did not change anything either.
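
Another way to force a single version, instead of excluding and re-adding, is sbt's dependencyOverrides; a minimal sketch (note that this only pins artifacts under the io.netty organization, which, as the answer below shows, is not where the conflicting classes actually come from):

// build.sbt -- pin every transitive io.netty:netty to the version akka-remote expects
dependencyOverrides += "io.netty" % "netty" % "3.8.0.Final"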

Here is my build.sbt:

libraryDependencies ++= Seq(
    "com.datastax.cassandra" % "cassandra-driver-core"    % "2.1.7.1" excludeAll(ExclusionRule(organization = "io.netty"), ExclusionRule(organization = "com.typesafe.akka")), 
    "com.datastax.spark"  %% "spark-cassandra-connector"   % "1.4.0" excludeAll(ExclusionRule(organization = "io.netty"), ExclusionRule(organization = "com.typesafe.akka")), 
    "com.datastax.spark"  %% "spark-cassandra-connector-embedded" % "1.4.0" % Test excludeAll(ExclusionRule(organization = "io.netty"), ExclusionRule(organization = "com.typesafe.akka")), 
    "xxx.xxxxx"    %% "shed"        % "0.10.0-MOK-1848-DEBUG-SNAPSHOT" excludeAll(ExclusionRule(organization = "io.netty"), ExclusionRule(organization = "com.typesafe.akka")), 
    "com.twitter"    %% "util-collection"      % "6.27.0" excludeAll(ExclusionRule(organization = "io.netty"), ExclusionRule(organization = "com.typesafe.akka")), 
    "io.circe"    %% "circe-core"       % "0.2.0-SNAPSHOT" excludeAll(ExclusionRule(organization = "io.netty"), ExclusionRule(organization = "com.typesafe.akka")), 
    "io.circe"    %% "circe-generic"      % "0.2.0-SNAPSHOT" excludeAll(ExclusionRule(organization = "io.netty"), ExclusionRule(organization = "com.typesafe.akka")), 
    "io.circe"    %% "circe-jawn"       % "0.2.0-SNAPSHOT" excludeAll(ExclusionRule(organization = "io.netty"), ExclusionRule(organization = "com.typesafe.akka")), 
    "io.netty"    % "netty"        % "3.6.6.Final", 
    //"com.typesafe.akka"  % "akka-remote_2.11"     % "2.3.4", 
    "org.apache.cassandra" % "cassandra-all"      % "2.1.5" excludeAll(ExclusionRule(organization = "io.netty"), ExclusionRule(organization = "com.typesafe.akka")), 
    "org.apache.cassandra" % "cassandra-thrift"     % "2.0.5" excludeAll(ExclusionRule(organization = "io.netty"), ExclusionRule(organization = "com.typesafe.akka")), 
    "org.apache.spark"  %% "spark-streaming-kafka"    % "1.4.0" excludeAll(ExclusionRule(organization = "io.netty"), ExclusionRule(organization = "com.typesafe.akka")), 
    "org.apache.spark"  %% "spark-streaming"      % sparkVersion % "provided" excludeAll(ExclusionRule(organization = "io.netty"), ExclusionRule(organization = "com.typesafe.akka")), 
    "org.apache.spark"  %% "spark-core"       % sparkVersion % "provided" excludeAll(ExclusionRule(organization = "io.netty"), ExclusionRule(organization = "com.typesafe.akka")), 
    "org.apache.spark"  %% "spark-sql"       % sparkVersion % "provided" excludeAll(ExclusionRule(organization = "io.netty"), ExclusionRule(organization = "com.typesafe.akka")), 
    "org.scalaz.stream"  % "scalaz-stream_2.11"     % "0.7.3" % Test excludeAll(ExclusionRule(organization = "io.netty"), ExclusionRule(organization = "com.typesafe.akka")), 
    "org.specs2"    %% "specs2-core"       % "3.6.1-scalaz-7.0.6" % "test" excludeAll(ExclusionRule(organization = "io.netty"), ExclusionRule(organization = "com.typesafe.akka")) 
) 

As you can see, I tried to exclude as much as I could.

And the sub-project shed contains:

"com.github.scopt"   %% "scopt"    % "3.3.0" , 
    "com.typesafe.akka"   %% "akka-testkit"   % "2.3.8"    % "test", 
    "com.typesafe.play"   %% "play-test"   % "2.3.8"    % "test", 
    "com.tinkerpop.gremlin"  % "gremlin-java"   % gremlinVersion, 
    "com.tinkerpop"    % "pipes"    % gremlinVersion, 
    "com.thinkaurelius.titan" % "titan-core"   % titanVersion, 
    "com.thinkaurelius.titan" % "titan-cassandra"  % titanVersion, 
    "com.thinkaurelius.titan" % "titan-berkeleyje"  % titanVersion, 
    "com.netaporter"    %% "scala-uri"   % "0.4.8", 
    "com.github.nscala-time"  %% "nscala-time"   % "1.8.0", 
    "com.mandubian"    %% "play-json-zipper"  % "1.2", 
    "com.michaelpollmeier"  %% "gremlin-scala"  % "2.6.1", 
    "com.ansvia.graph"   %% "blueprints-scala"  % "0.1.61-20150416-SNAPSHOT", 
    "io.kanaka"     %% "play-monadic-actions" % "1.0.1" exclude("com.typesafe.play", "play_2.11"), 
    "org.scalaz"     %% "scalaz-concurrent" % "7.0.6", 
    "com.chuusai"    %% "shapeless"   % "2.3.0-SNAPSHOT", 
    ("io.github.jto"    %% "validation-json"  % "1.0").exclude("org.tpolecat", "tut-core_2.11"), 
    "org.parboiled"    %% "parboiled"   % "2.1.0", 
    "com.typesafe.scala-logging" %% "scala-logging"  % "3.1.0", 
    "ch.qos.logback"    % "logback-classic"  % "1.1.2", 
    "xxx.xxxxxxxxxx"    %% "chuck"    % "0.9.0-SNAPSHOT", 
    "xxx.xxxxxxxxxx"    %% "shed-graph"   % "0.9.0-MOK-1848-SNAPSHOT" exclude("com.thinkaurelius.titan", "titan-core"), 
    "io.circe"     %% "circe-core"   % "0.2.0-SNAPSHOT", 
    "io.circe"     %% "circe-generic"  % "0.2.0-SNAPSHOT", 
    "io.circe"     %% "circe-jawn"   % "0.2.0-SNAPSHOT" 

Could you share the dependencies section of your sbt build? – eliasah


Yes, I just added the build.sbt deps. – crak

Answer


It's a trap!

Netty changed its organization at some point, from org.jboss.netty to io.netty, but both contain the same packages.
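
A quick way to confirm a duplicate like this is to ask the JVM which jar actually provided the suspect class; a hypothetical diagnostic, run in a Scala REPL on the project classpath (NioWorkerPool is the class named in the VerifyError above):

// Prints the jar the class was loaded from; if it is not the Netty 3.8 jar
// that akka-remote expects, you have found the conflicting artifact.
val location = Class.forName("org.jboss.netty.channel.socket.nio.NioWorkerPool")
  .getProtectionDomain.getCodeSource.getLocation
println(location)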

exclude("org.jboss.netty", "netty") solved my problem.
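
Applied to the build.sbt above, this means excluding the old org.jboss.netty coordinates alongside io.netty; a sketch for one dependency (the same pattern applies to the other entries):

"com.datastax.spark" %% "spark-cassandra-connector" % "1.4.0" excludeAll(
  ExclusionRule(organization = "io.netty"),
  ExclusionRule(organization = "org.jboss.netty")  // the pre-rename organization
)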


I have the same problem. Which version of Spark are you using? Could you share which file you added this line to, and in which part of the sbt build? Thanks! – Djvu


Use exclude("org.jboss.netty", "netty") like this: "com.thinkaurelius.titan" % "titan-cassandra" % titanVersion exclude("org.jboss.netty", "netty"). Of course, you may have another dependency that includes Netty. – crak


Thanks for your answer. I use Spark 1.5, which manages its dependencies with a pom.xml, so I didn't know how to change it. I later set up an Artifactory proxy, cleaned everything, and recompiled, and it worked. It's strange, because I had compiled many times before and it always built without errors; I don't know what happened. Perhaps something changed recently in some dependency jars? – Djvu