
build.gradle cannot run a Spark job with the Cassandra driver

plugins { 
    id 'java' 
    id 'com.github.johnrengelman.shadow' version '1.2.3' 
} 

group 'com.hello.aggregation' 
version '1.0-SNAPSHOT' 

apply plugin: 'java' 
apply plugin: 'scala' 
apply plugin: 'idea' 

sourceCompatibility = 1.8 
targetCompatibility = 1.8 

configurations { 
    provided 
} 

sourceSets { 
    main { 
     compileClasspath += configurations.provided 
    } 
} 

repositories { 
    mavenCentral() 
} 

dependencies { 
    compile "org.scala-lang:scala-library:$scalaVersion" 
    compile "org.scala-lang:scala-reflect:$scalaVersion" 
    compile "org.scala-lang:scala-compiler:$scalaVersion" 

    compile "org.apache.spark:spark-core_$scalaBase:$sparkVersion" 
    compile "org.apache.spark:spark-sql_$scalaBase:$sparkVersion" 

    compile "com.datastax.cassandra:cassandra-driver-core:$cassandraDriverVersion" 
    compile "com.datastax.spark:spark-cassandra-connector_$scalaBase:$connectorVersion" 

    compile "org.slf4j:slf4j-api:$slf4jVersion" 

    compile "mysql:mysql-connector-java:$mySqlConnectorVersion" 

    testCompile group: 'junit', name: 'junit', version: '4.12' 
} 

task run(type: JavaExec, dependsOn: classes) { 
    main = mainClassFile 
    classpath sourceSets.main.runtimeClasspath 
    classpath configurations.runtime 
} 

jar { 
    classifier = 'all' 
    manifest { 
     attributes 'Implementation-Title': title, 
       'Implementation-Version': version, 
       'Main-Class': mainClassFile 
    } 
    include{sourceSets.main.output.classesDir} 
    zip64 true 
} 

shadowJar { 
    classifier = 'shadow' 
    append 'reference.conf' 
    dependencies { 

    } 

    zip64 true 
} 

idea { 
    module { 
     // IntelliJ does not know about the standard idiom of provided as used in managing 
     // uber/shaded jar dependencies. Make it so! 
     scopes.PROVIDED.plus += [ configurations.provided ] 
    } 
} 

gradle.properties

version=1.0.0 

scalaBase=2.11 
scalaVersion=2.11.4 
slf4jVersion=1.7.25 
sparkVersion=1.6.3 
connectorVersion=1.6.7 
cassandraDriverVersion=3.0.7 
mySqlConnectorVersion=5.1.37 

Exception:

17/06/19 16:03:54 INFO BlockManagerMaster: Registered BlockManager 
Exception in thread "main" java.lang.NoSuchMethodError: scala.runtime.ObjectRef.zero()Lscala/runtime/ObjectRef; 
     at com.datastax.spark.connector.cql.CassandraConnector$.com$datastax$spark$connector$cql$CassandraConnector$$createSession(CassandraConnector.scala) 
     at com.datastax.spark.connector.cql.CassandraConnector$$anonfun$7.apply(CassandraConnector.scala:150) 
     at com.datastax.spark.connector.cql.CassandraConnector$$anonfun$7.apply(CassandraConnector.scala:150) 
     at com.datastax.spark.connector.cql.RefCountedCache.createNewValueAndKeys(RefCountedCache.scala:32) 
     at com.datastax.spark.connector.cql.RefCountedCache.syncAcquire(RefCountedCache.scala:69) 
     at com.datastax.spark.connector.cql.RefCountedCache.acquire(RefCountedCache.scala:57) 
     at com.datastax.spark.connector.cql.CassandraConnector.openSession(CassandraConnector.scala:80) 
     at com.datastax.spark.connector.cql.CassandraConnector.withSessionDo(CassandraConnector.scala:107) 
     at com.datastax.spark.connector.cql.CassandraConnector.withClusterDo(CassandraConnector.scala:118) 
     at com.datastax.spark.connector.cql.Schema$.fromCassandra(Schema.scala:330) 
     at com.datastax.spark.connector.cql.Schema$.tableFromCassandra(Schema.scala:350) 
     at com.datastax.spark.connector.rdd.CassandraTableRowReaderProvider$class.tableDef(CassandraTableRowReaderProvider.scala:50) 
     at com.datastax.spark.connector.rdd.CassandraTableScanRDD.tableDef$lzycompute(CassandraTableScanRDD.scala:60) 
     at com.datastax.spark.connector.rdd.CassandraTableScanRDD.tableDef(CassandraTableScanRDD.scala:60) 
     at com.datastax.spark.connector.rdd.CassandraTableRowReaderProvider$class.verify(CassandraTableRowReaderProvider.scala:137) 
     at com.datastax.spark.connector.rdd.CassandraTableScanRDD.verify(CassandraTableScanRDD.scala:60) 
     at com.datastax.spark.connector.rdd.CassandraTableScanRDD.getPartitions(CassandraTableScanRDD.scala:230) 
     at org.apache.spark.rdd.RDD$$anonfun$partitions$2.apply(RDD.scala:239) 
     at org.apache.spark.rdd.RDD$$anonfun$partitions$2.apply(RDD.scala:237) 
     at scala.Option.getOrElse(Option.scala:120) 
     at org.apache.spark.rdd.RDD.partitions(RDD.scala:237) 
     at org.apache.spark.rdd.RDD$$anonfun$distinct$2.apply(RDD.scala:359) 
     at org.apache.spark.rdd.RDD$$anonfun$distinct$2.apply(RDD.scala:359) 
     at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:150) 
     at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:111) 
     at org.apache.spark.rdd.RDD.withScope(RDD.scala:316) 
     at org.apache.spark.rdd.RDD.distinct(RDD.scala:358) 
     at com.achoo.scala.streambright.SimpleDailyRun$.delayedEndpoint$com$achoo$scala$streambright$SimpleDailyRun$1(SimpleDailyRun.scala:30) 
     at com.achoo.scala.streambright.SimpleDailyRun$delayedInit$body.apply(SimpleDailyRun.scala:14) 
     at scala.Function0$class.apply$mcV$sp(Function0.scala:40) 
     at scala.runtime.AbstractFunction0.apply$mcV$sp(AbstractFunction0.scala:12) 
     at scala.App$$anonfun$main$1.apply(App.scala:71) 
     at scala.App$$anonfun$main$1.apply(App.scala:71) 
     at scala.collection.immutable.List.foreach(List.scala:318) 

Code:

package com.streambright 

import java.sql.{Connection, DriverManager} 

import com.mysql.jdbc.Driver 
import org.apache.spark.rdd.JdbcRDD 
import org.apache.spark.{SparkConf, SparkContext} 
import com.datastax.spark.connector.toSparkContextFunctions 
import org.apache.spark.sql.cassandra.CassandraSQLContext 


object SimpleDailyRun extends App { 
    DriverManager.registerDriver(new Driver()) 
    val config = new SparkConf(true).setAppName("Simple Daily Run") 
    val sc = SparkContext.getOrCreate(config) 
    val cc = new CassandraSQLContext(sc) 
    cc.setKeyspace("achoo") 
    val conn = DriverManager.getConnection("jdbc:mysql://10.175.190.95/db?useUnicode=yes&characterEncoding=UTF-8&user=user&password=pass") 

    val mySqlJdbcRDD = new JdbcRDD(sc,() => conn, 
    "SELECT b.project_id,a.keyword FROM keyword a " + 
     "JOIN project_keyword b ON a.id = b.keyword_id LIMIT ?, ?", 
    0, 100000000, 1, r => (r.getInt("project_id"), r.getString("keyword"))) 

    val cassandraRDD = sc.cassandraTable("hello", "instagram_keyword_analytic") 
    .select("keyword", "relativepath") 
    .as((_: String, _: String)) 
    .distinct() 

    // Save each RDD under its own prefix (the original saved both under the MySQL prefix)
    mySqlJdbcRDD.saveAsTextFile("/data/MySQL_projectid_keywords_" + System.currentTimeMillis() + ".txt")
    cassandraRDD.saveAsTextFile("/data/Cassandra_keyword_relativepath_" + System.currentTimeMillis() + ".txt")

} 

Cassandra version: cassandra21-2.1.15-1 (DataStax). Spark version: 1.6.3.

Does anyone know how to fix this?

Answer


The fix is to build against Scala 2.10 instead of 2.11. scala.runtime.ObjectRef.zero() only exists in the Scala 2.11 runtime, so a NoSuchMethodError on it is the classic signature of a job jar compiled against Scala 2.11 running on a Spark installation that was built with Scala 2.10, which is what this DataStax-packaged Spark 1.6.3 cluster ships. Changing scalaBase and scalaVersion in gradle.properties makes the application, Spark, and the connector agree on the Scala binary version.
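
For reference, a minimal sketch of gradle.properties after the switch. The exact 2.10.x patch release is an assumption (2.10.6 was the last one); the remaining versions are unchanged from the question, and both spark-core_2.10:1.6.3 and spark-cassandra-connector_2.10:1.6.7 are published to Maven Central:

version=1.0.0 

# Scala binary version must match the Scala version the cluster's Spark was built with 
scalaBase=2.10 
scalaVersion=2.10.6 
slf4jVersion=1.7.25 
sparkVersion=1.6.3 
connectorVersion=1.6.7 
cassandraDriverVersion=3.0.7 
mySqlConnectorVersion=5.1.37 

Because the build interpolates $scalaBase into every cross-built artifact name (spark-core_2.10, spark-sql_2.10, spark-cassandra-connector_2.10), this single property change re-aligns the whole dependency tree. To double-check which Scala version the cluster's Spark uses, launch spark-shell on a cluster node; its startup banner prints a "Using Scala version ..." line.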
