build.sbt
lazy val commonSettings = Seq(
organization := "com.me",
version := "0.1.0",
scalaVersion := "2.11.0"
)
lazy val counter = (project in file("counter")).
settings(commonSettings:_*)
counter/build.sbt
name := "counter"
mainClass := Some("Counter")
scalaVersion := "2.11.0"
val sparkVersion = "2.1.1"
libraryDependencies += "org.apache.spark" %% "spark-core" % sparkVersion % "provided"
libraryDependencies += "org.apache.spark" %% "spark-sql" % sparkVersion % "provided"
libraryDependencies += "org.apache.spark" %% "spark-streaming" % sparkVersion % "provided"
libraryDependencies += "com.datastax.spark" %% "spark-cassandra-connector" % "2.0.2"
libraryDependencies += "org.apache.spark" %% "spark-streaming-kafka-0-8" % sparkVersion
libraryDependencies += "com.github.scopt" %% "scopt" % "3.5.0"
libraryDependencies += "org.scalactic" %% "scalactic" % "3.0.1"
libraryDependencies += "org.scalatest" %% "scalatest" % "3.0.1" % "test"
assemblyMergeStrategy in assembly := {
  case PathList("org", "apache", "spark", "unused", "UnusedStubClass.class") => MergeStrategy.first
  case x =>
    // delegate everything else to the default strategy, per the sbt-assembly docs
    val oldStrategy = (assemblyMergeStrategy in assembly).value
    oldStrategy(x)
}
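For context, the fat jar referenced below is what sbt-assembly produces. A sketch of the usual build step (the output path follows sbt's default layout, which the comments at the end also point at):

sbt counter/assembly
# writes the fat jar to counter/target/scala-2.11/counter-assembly-0.1.0.jar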
counter.scala:
import org.apache.spark.streaming.StreamingContext

object Counter extends SignalHandler {
  var ssc: Option[StreamingContext] = None
  def main(args: Array[String]): Unit = {
    // ... (body omitted in the original post)
  }
}
Run:
./spark-submit --class "Counter" --master spark://10.1.204.67:6066 --deploy-mode cluster file://counter-assembly-0.1.0.jar
Error:
17/06/21 19:00:25 INFO Utils: Successfully started service 'Driver' on port 50140.
17/06/21 19:00:25 INFO WorkerWatcher: Connecting to worker spark://[email protected]:52476
Exception in thread "main" java.lang.ClassNotFoundException: Counter
at java.net.URLClassLoader.findClass(URLClassLoader.java:381)
at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
at java.lang.Class.forName0(Native Method)
at java.lang.Class.forName(Class.java:348)
at org.apache.spark.util.Utils$.classForName(Utils.scala:229)
at org.apache.spark.deploy.worker.DriverWrapper$.main(DriverWrapper.scala:56)
at org.apache.spark.deploy.worker.DriverWrapper.main(DriverWrapper.scala)
Any idea? Thanks
UPDATE
I previously had the problem described in Failed to submit local jar to spark cluster: java.nio.file.NoSuchFileException. Now I have copied the jar into spark-2.1.0-bin-hadoop2.7/bin
and run ./spark-submit --class "Counter" --master spark://10.1.204.67:6066 --deploy-mode cluster file://Counter-assembly-0.1.0.jar
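One detail worth flagging (a hedged aside, not something the error proves by itself): file:// followed directly by the jar name is not an absolute file URI; a file URI needs file:/// plus a full path, and per the Spark docs the jar URL in cluster deploy mode must be readable from the node that ends up running the driver. Assuming the jar really sits in the directory spark-submit is launched from, the invocation would look like:

./spark-submit --class "Counter" --master spark://10.1.204.67:6066 --deploy-mode cluster file://$(pwd)/Counter-assembly-0.1.0.jar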
The Spark cluster is 2.1.0, but the jar was assembled against Spark 2.1.1 (and Scala 2.11.0).
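If that mismatch turns out to matter, a minimal sketch of the fix is to pin the build to the cluster's Spark version in counter/build.sbt and re-assemble:

val sparkVersion = "2.1.0"  // match the cluster, instead of "2.1.1"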
Comments:
Why file://counter-assembly-0.1.0.jar and not target/scala-2.11/counter-assembly-0.1.0.jar? In other words, where do you start spark-submit from? – Scrivner
From spark-2.1.0-bin-hadoop2.7/bin, and then I run ./spark-submit --class "Counter" --master spark://10.1.204.67:6066 --deploy-mode cluster file://Counter-assembly-0.1.0.jar – Lobbyism