/opt/jdk1.8.0_121/bin/java -javaagent:/opt/idea-IC-171.4249.39/lib/idea_rt.jar=41331:/opt/idea-IC-171.4249.39/bin -Dfile.encoding=UTF-8 -classpath /opt/jdk1.8.0_121/jre/lib/charsets.jar:/opt/jdk1.8.0_121/jre/lib/deploy.jar:/opt/jdk1.8.0_121/jre/lib/ext/cldrdata.jar:/opt/jdk1.8.0_121/jre/lib/ext/dnsns.jar:/opt/jdk1.8.0_121/jre/lib/ext/jaccess.jar:/opt/jdk1.8.0_121/jre/lib/ext/jfxrt.jar:/opt/jdk1.8.0_121/jre/lib/ext/localedata.jar:/opt/jdk1.8.0_121/jre/lib/ext/mysql-connector-java-5.1.40-bin.jar:/opt/jdk1.8.0_121/jre/lib/ext/nashorn.jar:/opt/jdk1.8.0_121/jre/lib/ext/sunec.jar:/opt/jdk1.8.0_121/jre/lib/ext/sunjce_provider.jar:/opt/jdk1.8.0_121/jre/lib/ext/sunpkcs11.jar:/opt/jdk1.8.0_121/jre/lib/ext/zipfs.jar:/opt/jdk1.8.0_121/jre/lib/javaws.jar:/opt/jdk1.8.0_121/jre/lib/jce.jar:/opt/jdk1.8.0_121/jre/lib/jfr.jar:/opt/jdk1.8.0_121/jre/lib/jfxswt.jar:/opt/jdk1.8.0_121/jre/lib/jsse.jar:/opt/jdk1.8.0_121/jre/lib/management-agent.jar:/opt/jdk1.8.0_121/jre/lib/plugin.jar:/opt/jdk1.8.0_121/jre/lib/resources.jar:/opt/jdk1.8.0_121/jre/lib/rt.jar:/home/hadoop/IdeaProjects/Spark_19/target/scala-2.11/classes:/home/hadoop/.ivy2/cache/org.scala-lang/scala-library/jars/scala-library-2.11.0.jar SparkPi
Exception in thread "main" java.lang.NoClassDefFoundError: org/apache/spark/SparkConf
    at SparkPi$.main(SparkPi.scala:15)
    at SparkPi.main(SparkPi.scala)
Caused by: java.lang.ClassNotFoundException: org.apache.spark.SparkConf
    at java.net.URLClassLoader.findClass(URLClassLoader.java:381)
    at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
    at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:331)
    at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
    ... 2 more
Process finished with exit code 1

Solutions »

  1.   

    Code:
    import org.apache.spark.SparkConf
    import org.apache.spark.SparkContext

    import scala.collection.mutable.ArrayBuffer
    import scala.math.random

    object SparkPi {
      def main(args: Array[String]) {
        // Path to the application jar (e.g. the one produced by sbt package);
        // left empty here for you to fill in.
        val jar: String = ""
        val jars = ArrayBuffer[String]()
        jars += jar
        // setJars ships the listed jars to the executors so the cluster can load your classes
        val conf = new SparkConf().setMaster("spark://master:7077").setAppName("Spark Pi").setJars(jars)
        val spark = new SparkContext(conf)
        val slices = if (args.length > 0) args(0).toInt else 2
        val n = math.min(100000L * slices, Int.MaxValue).toInt
        // Monte Carlo estimate: sample points in the unit square and count those
        // inside the unit circle; that fraction approaches Pi/4
        val count = spark.parallelize(1 until n, slices).map { i =>
          val x = random * 2 - 1
          val y = random * 2 - 1
          if (x * x + y * y < 1) 1 else 0
        }.reduce(_ + _)
        println("Pi is roughly " + 4.0 * count / n)
        spark.stop()
      }
    }
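
    Note that the java command in the question has only scala-library and the project's own classes on the classpath, with no Spark jars at all, which is exactly what the NoClassDefFoundError is complaining about. A minimal build.sbt sketch for pulling spark-core into the project (the project name and Spark version below are assumptions; pick a Spark release built for Scala 2.11 that matches your cluster):

    // build.sbt -- minimal sketch; name and Spark version are assumptions
    name := "Spark_19"

    scalaVersion := "2.11.0"

    // spark-core provides org.apache.spark.SparkConf and SparkContext
    libraryDependencies += "org.apache.spark" %% "spark-core" % "2.1.0"

    After reimporting the sbt project, IDEA puts the Spark jars on the run classpath and the error should disappear. To run against the spark://master:7077 cluster instead of locally, build the jar with sbt package and pass its path via setJars as in the code above.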
      

  2.   

    See whether this article solves your problem: https://blog.csdn.net/qq_29269907/article/details/83746542