
Too much information gets logged to the console when running my Spark program

Question asked by vsbgugan on Feb 26, 2015
Latest reply on Feb 27, 2015 by vsbgugan
I get the following type of trace printed on the console when running my Spark program. Do I need to turn off any flags anywhere?


    [Loaded scala.concurrent.Await$$anonfun$ready$1 from file:/opt/mapr/spark/spark-1.0.2/lib/spark-assembly-1.0.2-hadoop1.0.3-mapr-3.0.3.jar]
    [Loaded org.apache.spark.scheduler.cluster.CoarseGrainedSchedulerBackend$DriverActor$$anonfun$receive$1$$anonfun$applyOrElse$3 from file:/opt/mapr/spark/spark-1.0.2/lib/spark-assembly-1.0.2-hadoop1.0.3-mapr-3.0.3.jar]
    15/02/26 01:08:22 INFO cluster.SparkDeploySchedulerBackend: Asking each executor to shut down
    [Loaded org.apache.spark.scheduler.cluster.CoarseGrainedSchedulerBackend$DriverActor$$anonfun$receive$1$$anonfun$applyOrElse$4 from file:/opt/mapr/spark/spark-1.0.2/lib/spark-assembly-1.0.2-hadoop1.0.3-mapr-3.0.3.jar]
    [Loaded org.apache.spark.scheduler.cluster.CoarseGrainedClusterMessages$StopExecutor$ from file:/opt/mapr/spark/spark-1.0.2/lib/spark-assembly-1.0.2-hadoop1.0.3-mapr-3.0.3.jar]
    [Loaded akka.util.Collections$PartialImmutableValuesIterable from file:/opt/mapr/spark/spark-1.0.2/lib/spark-assembly-1.0.2-hadoop1.0.3-mapr-3.0.3.jar]
    [Loaded akka.actor.dungeon.ChildrenContainer$ChildrenIterable from file:/opt/mapr/spark/spark-1.0.2/lib/spark-assembly-1.0.2-hadoop1.0.3-mapr-3.0.3.jar]
    [Loaded akka.actor.dungeon.FaultHandling$$anonfun$terminate$1 from file:/opt/mapr/spark/spark-1.0.2/lib/spark-assembly-1.0.2-hadoop1.0.3-mapr-3.0.3.jar]
    [Loaded sun.reflect.GeneratedSerializationConstructorAccessor86 from __JVM_DefineClass__]
    [Loaded akka.actor.dungeon.ChildrenContainer$Termination$ from file:/opt/mapr/spark/spark-1.0.2/lib/spark-assembly-1.0.2-hadoop1.0.3-mapr-3.0.3.jar]
    [Loaded scala.collection.immutable.RedBlackTree$TreeIterator from file:/opt/mapr/spark/spark-1.0.2/lib/spark-assembly-1.0.2-hadoop1.0.3-mapr-3.0.3.jar]
    [Loaded scala.collection.immutable.RedBlackTree$ValuesIterator from file:/opt/mapr/spark/spark-1.0.2/lib/spark-assembly-1.0.2-hadoop1.0.3-mapr-3.0.3.jar]
    [Loaded akka.actor.dungeon.ChildrenContainer$TerminatedChildrenContainer$ from file:/opt/mapr/spark/spark-1.0.2/lib/spark-assembly-1.0.2-hadoop1.0.3-mapr-3.0.3.jar]
    [Loaded akka.util.Collections$PartialImmutableValuesIterable$$anon$1 from file:/opt/mapr/spark/spark-1.0.2/lib/spark-assembly-1.0.2-hadoop1.0.3-mapr-3.0.3.jar]
    [Loaded org.apache.spark.deploy.client.AppClient$ClientActor$$anonfun$postStop$1 from file:/opt/mapr/spark/spark-1.0.2/lib/spark-assembly-1.0.2-hadoop1.0.3-mapr-3.0.3.jar]
    [Loaded akka.actor.dungeon.ChildrenContainer$NormalChildrenContainer$ from file:/opt/mapr/spark/spark-1.0.2/lib/spark-assembly-1.0.2-hadoop1.0.3-mapr-3.0.3.jar]
    [Loaded akka.actor.dungeon.ChildrenContainer$UserRequest$ from file:/opt/mapr/spark/spark-1.0.2/lib/spark-assembly-1.0.2-hadoop1.0.3-mapr-3.0.3.jar]
    [Loaded akka.actor.dungeon.ChildrenContainer$WaitingForChildren from file:/opt/mapr/spark/spark-1.0.2/lib/spark-assembly-1.0.2-hadoop1.0.3-mapr-3.0.3.jar]
    [Loaded org.apache.spark.scheduler.DAGScheduler$$anonfun$cleanUpAfterSchedulerStop$1 from file:/opt/mapr/spark/spark-1.0.2/lib/spark-assembly-1.0.2-hadoop1.0.3-mapr-3.0.3.jar]
    [Loaded akka.actor.dungeon.ChildrenContainer$Recreation from file:/opt/mapr/spark/spark-1.0.2/lib/spark-assembly-1.0.2-hadoop1.0.3-mapr-3.0.3.jar]
    [Loaded akka.actor.dungeon.ChildrenContainer$Creation from file:/opt/mapr/spark/spark-1.0.2/lib/spark-assembly-1.0.2-hadoop1.0.3-mapr-3.0.3.jar]
    [Loaded akka.remote.transport.AssociationHandle$Shutdown$ from file:/opt/mapr/spark/spark-1.0.2/lib/spark-assembly-1.0.2-hadoop1.0.3-mapr-3.0.3.jar]
    [Loaded akka.actor.FSM$Failure from file:/opt/mapr/spark/spark-1.0.2/lib/spark-assembly-1.0.2-hadoop1.0.3-mapr-3.0.3.jar]
    [Loaded akka.actor.FSM$$anonfun$applyState$1 from file:/opt/mapr/spark/spark-1.0.2/lib/spark-assembly-1.0.2-hadoop1.0.3-mapr-3.0.3.jar]
    [Loaded akka.remote.transport.ProtocolStateActor$TimeoutReason$ from file:/opt/mapr/spark/spark-1.0.2/lib/spark-assembly-1.0.2-hadoop1.0.3-mapr-3.0.3.jar]
    [Loaded akka.actor.FSM$$anonfun$terminate$1 from file:/opt/mapr/spark/spark-1.0.2/lib/spark-assembly-1.0.2-hadoop1.0.3-mapr-3.0.3.jar]
    [Loaded akka.actor.FSM$StopEvent from file:/opt/mapr/spark/spark-1.0.2/lib/spark-assembly-1.0.2-hadoop1.0.3-mapr-3.0.3.jar]
    [Loaded akka.remote.transport.netty.NettyTransport$$anonfun$akka$remote$transport$netty$NettyTransport$$always$1$1 from file:/opt/mapr/spark/spark-1.0.2/lib/spark-assembly-1.0.2-hadoop1.0.3-mapr-3.0.3.jar]
    [Loaded akka.remote.transport.Transport$InvalidAssociationException$ from file:/opt/mapr/spark/spark-1.0.2/lib/spark-assembly-1.0.2-hadoop1.0.3-mapr-3.0.3.jar]
    [Loaded org.jboss.netty.channel.Channels$7 from file:/opt/mapr/spark/spark-1.0.2/lib/spark-assembly-1.0.2-hadoop1.0.3-mapr-3.0.3.jar]
    [Loaded akka.remote.transport.netty.NettyTransport$$anonfun$gracefulClose$1 from file:/opt/mapr/spark/spark-1.0.2/lib/spark-assembly-1.0.2-hadoop1.0.3-mapr-3.0.3.jar]
    [Loaded akka.OnlyCauseStackTrace$class from file:/opt/mapr/spark/spark-1.0.2/lib/spark-assembly-1.0.2-hadoop1.0.3-mapr-3.0.3.jar]
    [Loaded scala.util.Failure$$anonfun$recover$1 from file:/opt/mapr/spark/spark-1.0.2/lib/spark-assembly-1.0.2-hadoop1.0.3-mapr-3.0.3.jar]
    [Loaded scala.concurrent.Future$$anonfun$foreach$1 from file:/opt/mapr/spark/spark-1.0.2/lib/spark-assembly-1.0.2-hadoop1.0.3-mapr-3.0.3.jar]
    [Loaded akka.actor.ActorCell$$anonfun$3 from file:/opt/mapr/spark/spark-1.0.2/lib/spark-assembly-1.0.2-hadoop1.0.3-mapr-3.0.3.jar]
    [Loaded org.jboss.netty.channel.ExceptionEvent from file:/opt/mapr/spark/spark-1.0.2/lib/spark-assembly-1.0.2-hadoop1.0.3-mapr-3.0.3.jar]
    [Loaded org.jboss.netty.channel.DefaultExceptionEvent from file:/opt/mapr/spark/spark-1.0.2/lib/spark-assembly-1.0.2-hadoop1.0.3-mapr-3.0.3.jar]
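
For what it's worth, the two kinds of output here usually have different sources: the "[Loaded ...]" lines are what the JVM prints when the -verbose:class option is set (for example via spark.driver.extraJavaOptions, spark.executor.extraJavaOptions, or SPARK_SUBMIT_OPTS), while the INFO lines come from Spark's log4j configuration. A minimal sketch of quieting the log4j side programmatically, assuming a Scala driver and the log4j 1.x bundled with Spark 1.0.2 (the object name and application logic below are placeholders, not from the original post):

    import org.apache.log4j.{Level, Logger}
    import org.apache.spark.{SparkConf, SparkContext}

    // Hypothetical driver, shown only to illustrate where the log-level change goes.
    object QuietDriver {
      def main(args: Array[String]): Unit = {
        // Raise the noisy Spark and Akka loggers to WARN before the
        // SparkContext starts emitting INFO messages.
        Logger.getLogger("org").setLevel(Level.WARN)
        Logger.getLogger("akka").setLevel(Level.WARN)

        val sc = new SparkContext(new SparkConf().setAppName("QuietDriver"))
        // ... application logic ...
        sc.stop()
      }
    }

The equivalent non-programmatic route is to copy conf/log4j.properties.template to conf/log4j.properties and change log4j.rootCategory=INFO, console to WARN. The "[Loaded ...]" lines are not log4j output, so they will keep appearing until -verbose:class is removed from whatever JVM options the job is launched with.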


