package test;

import org.apache.log4j.Level;
import org.apache.log4j.Logger;
import org.apache.spark.SparkConf;
import org.apache.spark.SparkContext;
import org.apache.spark.rdd.RDD;

public class ThreadTest {
    public static int port = 4050;

    public static void main(String[] args) {
        Logger.getLogger("org.apache.spark").setLevel(Level.WARN);
        Logger.getLogger("org.eclipse.jetty.server").setLevel(Level.OFF);
        // System.setProperty("spark.broadcast.factory",
        //         "org.apache.spark.broadcast.HttpBroadcastFactory");

        SparkThread t1 = new SparkThread();
        SparkThread t2 = new SparkThread();
        SparkThread t3 = new SparkThread();
        t1.start();
        t2.start();
        t3.start();
    }

    // Hands out a unique Spark UI port per thread. Synchronized because
    // port++ is not atomic and three threads call this concurrently.
    public static synchronized int g() {
        return port++;
    }
}
class SparkThread extends Thread {
    @Override
    public void run() {
        int i = ThreadTest.g();
        System.out.println(i);

        System.setProperty("spark.cores.max", "1");
        System.setProperty("spark.executor.memory", "1g");

        // Each thread builds its own SparkContext, on a distinct UI port
        // to avoid the "Address already in use" clash on 4040.
        SparkConf conf = new SparkConf()
                .setAppName("test" + i)
                .setMaster("spark://master:7077");
        conf.set("spark.ui.port", String.valueOf(i));
        SparkContext sc = new SparkContext(conf);
        sc.addJar("/home/web/ideaworkspace/test1/out/artifacts/test1_jar/test1.jar");

        RDD<String> lines = sc.textFile("hdfs://master:8000/user/hive/warehouse/test.db/yiyao", 1);
        System.out.println(lines.toJavaRDD().count());
        sc.stop();
    }
}
}

Console output:

4050
4051
4052
Using Spark's default log4j profile: org/apache/spark/log4j-defaults.properties
SLF4J: Class path contains multiple SLF4J bindings.
SLF4J: Found binding in [jar:file:/home/lib/spark/spark-assembly-1.1.0-hadoop2.2.0.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: Found binding in [jar:file:/home/spark-assembly-1.3.1-hadoop2.3.0.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: See http://www.slf4j.org/codes.html#multiple_bindings for an explanation.
SLF4J: Actual binding is of type [org.slf4j.impl.Log4jLoggerFactory]
15/07/28 19:04:58 INFO Slf4jLogger: Slf4jLogger started
15/07/28 19:04:58 INFO Slf4jLogger: Slf4jLogger started
15/07/28 19:04:58 INFO Slf4jLogger: Slf4jLogger started
15/07/28 19:04:58 INFO Remoting: Starting remoting
15/07/28 19:04:58 INFO Remoting: Starting remoting
15/07/28 19:04:58 INFO Remoting: Starting remoting
15/07/28 19:04:58 INFO Remoting: Remoting started; listening on addresses :[akka.tcp://sparkDriver@datanode:33399]
15/07/28 19:04:58 INFO Remoting: Remoting started; listening on addresses :[akka.tcp://sparkDriver@datanode:58969]
15/07/28 19:04:58 INFO Remoting: Remoting started; listening on addresses :[akka.tcp://sparkDriver@datanode:55465]
15/07/28 19:04:58 INFO Remoting: Remoting now listens on addresses: [akka.tcp://sparkDriver@datanode:58969]
15/07/28 19:04:58 INFO Remoting: Remoting now listens on addresses: [akka.tcp://sparkDriver@datanode:55465]
15/07/28 19:04:58 INFO Remoting: Remoting now listens on addresses: [akka.tcp://sparkDriver@datanode:33399]
15/07/28 19:05:00 WARN NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
15/07/28 19:05:04 INFO FileInputFormat: Total input paths to process : 1
15/07/28 19:05:04 INFO FileInputFormat: Total input paths to process : 1
15/07/28 19:05:04 INFO FileInputFormat: Total input paths to process : 1
15/07/28 19:05:05 WARN BlockManager: Block broadcast_1 already exists on this machine; not re-adding it
15/07/28 19:05:05 WARN BlockManager: Block broadcast_1_piece0 already exists on this machine; not re-adding it
200
15/07/28 19:05:09 INFO RemoteActorRefProvider$RemotingTerminator: Shutting down remote daemon.
15/07/28 19:05:09 INFO RemoteActorRefProvider$RemotingTerminator: Remote daemon shut down; proceeding with flushing remote transports.
15/07/28 19:05:09 INFO Remoting: Remoting shut down
15/07/28 19:05:09 INFO RemoteActorRefProvider$RemotingTerminator: Remoting shut down.
15/07/28 19:05:10 WARN TaskSetManager: Lost task 0.0 in stage 0.0 (TID 0, master): java.io.IOException: unexpected exception type
        java.io.ObjectStreamClass.throwMiscException(ObjectStreamClass.java:1538)
        java.io.ObjectStreamClass.invokeReadObject(ObjectStreamClass.java:1025)
        java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1893)
        java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1798)
        java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1350)
        java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1990)
        java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1915)
        java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1798)
        java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1350)
        java.io.ObjectInputStream.readObject(ObjectInputStream.java:370)
        org.apache.spark.serializer.JavaDeserializationStream.readObject(JavaSerializer.scala:62)
        org.apache.spark.serializer.JavaSerializerInstance.deserialize(JavaSerializer.scala:87)
        org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:159)
        java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
        java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
        java.lang.Thread.run(Thread.java:745)
15/07/28 19:05:10 ERROR TaskSetManager: Task 0 in stage 0.0 failed 4 times; aborting job
Exception in thread "Thread-0" org.apache.spark.SparkException: Job aborted due to stage failure: Task 0 in stage 0.0 failed 4 times, most recent failure: Lost task 0.3 in stage 0.0 (TID 3, master): java.io.IOException: unexpected exception type
        java.io.ObjectStreamClass.throwMiscException(ObjectStreamClass.java:1538)
        java.io.ObjectStreamClass.invokeReadObject(ObjectStreamClass.java:1025)
        java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1893)
        java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1798)
        java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1350)
        java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1990)
        java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1915)
        java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1798)
        java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1350)
        java.io.ObjectInputStream.readObject(ObjectInputStream.java:370)
        org.apache.spark.serializer.JavaDeserializationStream.readObject(JavaSerializer.scala:62)
        org.apache.spark.serializer.JavaSerializerInstance.deserialize(JavaSerializer.scala:87)
        org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:159)
        java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
        java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
        java.lang.Thread.run(Thread.java:745)
Driver stacktrace:
        at org.apache.spark.scheduler.DAGScheduler.org$apache$spark$scheduler$DAGScheduler$$failJobAndIndependentStages(DAGScheduler.scala:1185)
        at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1174)
        at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1173)
        at scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:59)
        at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:47)
        at org.apache.spark.scheduler.DAGScheduler.abortStage(DAGScheduler.scala:1173)
        at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:688)
        at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:688)
        at scala.Option.foreach(Option.scala:236)
        at org.apache.spark.scheduler.DAGScheduler.handleTaskSetFailed(DAGScheduler.scala:688)
        at org.apache.spark.scheduler.DAGSchedulerEventProcessActor$$anonfun$receive$2.applyOrElse(DAGScheduler.scala:1391)
        at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
        at akka.actor.ActorCell.invoke(ActorCell.scala:456)
        at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
        at akka.dispatch.Mailbox.run(Mailbox.scala:219)
        at akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
        at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
        at scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
        at scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
        at scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
200
15/07/28 19:05:12 ERROR ConnectionManager: Corresponding SendingConnection to ConnectionManagerId(master,60532) not found
15/07/28 19:05:13 INFO RemoteActorRefProvider$RemotingTerminator: Shutting down remote daemon.
15/07/28 19:05:13 INFO RemoteActorRefProvider$RemotingTerminator: Remote daemon shut down; proceeding with flushing remote transports.
15/07/28 19:05:13 INFO Remoting: Remoting shut down
15/07/28 19:05:13 INFO RemoteActorRefProvider$RemotingTerminator: Remoting shut down.
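For reference: Spark 1.x supports only one SparkContext per JVM (see SPARK-2243), and the SLF4J lines above show two different spark-assembly jars (1.1.0 and 1.3.1) on the classpath, which may be related to the deserialization failure on the executors. Below is a minimal sketch of the usual workaround, assuming the same master URL and HDFS path as in the question: create a single context and submit jobs to it from several threads, since Spark's scheduler is thread-safe for concurrent job submission. The class name SharedContextTest is hypothetical.

package test;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaSparkContext;

public class SharedContextTest {
    public static void main(String[] args) throws InterruptedException {
        // One context for the whole JVM; jobs may be submitted to it
        // concurrently from multiple threads.
        SparkConf conf = new SparkConf()
                .setAppName("sharedContext")
                .setMaster("spark://master:7077"); // same master URL as above
        final JavaSparkContext sc = new JavaSparkContext(conf);

        Runnable job = new Runnable() {
            @Override
            public void run() {
                // Same input path as the question; each thread runs its
                // own count() as a separate concurrent job.
                long n = sc.textFile("hdfs://master:8000/user/hive/warehouse/test.db/yiyao").count();
                System.out.println(Thread.currentThread().getName() + ": " + n);
            }
        };

        Thread t1 = new Thread(job);
        Thread t2 = new Thread(job);
        Thread t3 = new Thread(job);
        t1.start(); t2.start(); t3.start();
        t1.join(); t2.join(); t3.join();

        sc.stop();
    }
}

With a single shared context there is no need to juggle spark.ui.port per thread; the three counts run as concurrent jobs inside one application.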