package stream.scala

import java.io.PrintWriter
import java.net.ServerSocket

class LoggerSimulation {}

object LoggerSimulation {
  /**
   * Generate a letter.
   * @param index the index of the letter to generate
   * @return the generated letter
   */
  def gennerateContent(index: Int): String = {
    import scala.collection.mutable.ListBuffer
    val charList = ListBuffer[Char]()
    // The loop bounds were lost in the captured source; 65 to 90 (the ASCII codes
    // for 'A'..'Z') is assumed here, which matches the letters in the sample output below.
    for (i <- 65 to 90) {
      charList += i.toChar
    }
    val charArray = charList.toArray
    charArray(index).toString
  }

  /**
   * Generate a random index.
   * @return a random index
   */
  def index = {
    import java.util.Random
    val rdm = new Random()
    // The bound was lost in the captured source; 7 is assumed here, since only
    // the letters A..G appear in the sample output below.
    rdm.nextInt(7)
  }

  /**
   * A main method that starts a ServerSocket and sends messages to connected clients.
   * @param args the port and the interval (in milliseconds) between messages
   */
  def main(args: Array[String]): Unit = {
    // if (args.length != 2) {
    //   System.err.println("Usage: <port> <millisecond>")
    //   System.exit(1)
    // }
    // The argument lost in the capture is assumed to be args(0), matching the
    // commented usage check above.
    val listener = new ServerSocket(args(0).toInt)
    println("Ready to accept connections -------")
    while (true) {
      val socket = listener.accept()
      new Thread() {
        override def run(): Unit = {
          println("Got client connected from: " + socket.getInetAddress)
          val out = new PrintWriter(socket.getOutputStream, true)
          while (true) {
            // The sleep interval lost in the capture is assumed to be args(1),
            // matching the commented usage check above.
            Thread.sleep(args(1).toLong)
            val content = gennerateContent(index)
            println(content)
            out.write(content + "\n")
            out.flush()
          }
          socket.close() // unreachable while the loop above never exits
        }
      }.start()
    }
  }
}
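To sanity-check the simulator before wiring Spark to it, a plain socket client that prints whatever the server sends is enough. This is only a sketch: the object name LoggerSimulationClient is made up for this example, and 8888 stands for whatever port the server was started on.

import java.io.{BufferedReader, InputStreamReader}
import java.net.Socket

object LoggerSimulationClient {
  def main(args: Array[String]): Unit = {
    // Connect to the running LoggerSimulation (8888 is just the example port used here).
    val socket = new Socket("localhost", 8888)
    val in = new BufferedReader(new InputStreamReader(socket.getInputStream))
    // Print the first few letters the simulator pushes, then disconnect.
    for (_ <- 1 to 5) {
      println(in.readLine())
    }
    socket.close()
  }
}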
--------------------------------------------------

package stream.scala

import org.apache.spark.storage.StorageLevel
import org.apache.spark.streaming.{Seconds, StreamingContext}
import org.apache.spark.{SparkConf, SparkContext}

/**
 * Receives text from a network port and counts the words in each batch.
 * Arguments: the Spark master URL, the host to connect to, the port, and the
 * batch interval in seconds, e.g.
 * local[*] localhost 8888 5
 */
class NetWorkWordCount {}

object NetWorkWordCount {
  def main(args: Array[String]): Unit = {
    // if (args.length != 4) {
    //   System.err.println("Usage: NetWorkWordCount <master> <hostname> <port> <seconds>\n" +
    //     "In local mode, <master> should be 'local[n]' with n > 1")
    //   System.exit(1)
    // }
    val config = new SparkConf().setAppName("NetWorkWordCount").setMaster("local[*]")
    // The numeric literals were lost in the capture; a 5-second batch interval and
    // port 8888 are assumed, matching the example arguments in the comment above.
    val ssc = new StreamingContext(config, Seconds(5))
    val lines = ssc.socketTextStream("localhost", 8888, StorageLevel.MEMORY_ONLY_SER)
    val words = lines.flatMap(line => line.split(" "))
    val wordCount = words.map(x => (x, 1)).reduceByKey(_ + _)
    wordCount.print()
    ssc.start()
    ssc.awaitTermination()
  }
}
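The heart of the job is the flatMap / map / reduceByKey chain. The same transformation can be sketched on a plain Scala collection to show what each micro-batch computes; the object name WordCountSketch and the sample lines are made up for the example, and groupBy plus a sum stands in for reduceByKey, which ordinary collections do not have.

object WordCountSketch {
  def main(args: Array[String]): Unit = {
    // A few "lines" standing in for the letters one 5-second batch might receive.
    val lines = Seq("A B C", "B C", "C")
    val counts = lines
      .flatMap(_.split(" "))                                   // split every line into words
      .map(word => (word, 1))                                  // pair each word with a count of 1
      .groupBy(_._1)                                           // collect the pairs per word
      .map { case (word, ones) => word -> ones.map(_._2).sum } // sum the 1s, like reduceByKey(_ + _)
    println(counts) // Map(A -> 1, B -> 2, C -> 3), ordering may differ
  }
}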
*************************************************************************
// ... Spark INFO log output (DAGScheduler / TaskSchedulerImpl / MemoryStore /
// BlockManager messages for every micro-batch; timestamps and numeric values
// were lost when the console output was captured) is interleaved with the
// word-count results that follow ...
-------------------------------------------
Time: ms
-------------------------------------------
(D,)
(E,)
(F,)
-------------------------------------------
Time: ms
-------------------------------------------
(B,)
(F,)
(G,)
(C,)
-------------------------------------------
Time: ms
-------------------------------------------
(A,)
(B,)
(C,)
-------------------------------------------
Time: ms
-------------------------------------------
(D,)
(G,)
-------------------------------------------
Time: ms
-------------------------------------------
(D,)
(A,)
(F,)
(G,)
// ... INFO (the JobScheduler reports this batch finished with a total delay of 0.061 s, execution 0.048 s; the batch's ShuffledRDD, MapPartitionsRDD and BlockRDD are removed from the persistence list and their input-* blocks from memory; the BlockGenerator keeps pushing new 8.0 B input-* blocks as they arrive from the socket; for the next batch the DAGScheduler registers a ShuffleMapStage for map and a ResultStage for reduceByKey, task sets 201.0 through 204.0 run NODE_LOCAL/PROCESS_LOCAL on localhost, and the print jobs finish in roughly 0.01 to 0.03 s) ...
-------------------------------------------
Time: ms
-------------------------------------------
(D,)
(B,)
(F,)
(C,)
// ... INFO (same cycle as above: the finished batch reports a total delay of 0.054 s, execution 0.046 s; its RDDs and input-* blocks are cleaned up, new input-* blocks are stored in memory, the next batch's ShuffleMapStage and ResultStage run as task sets 205.0 through 208.0 on localhost, and the print jobs complete in a few milliseconds) ...
-------------------------------------------
Time: ms
-------------------------------------------
(A,)
(B,)
(F,)
(C,)
// ... INFO (the print job took 0.010832 s and this batch finished with a total delay of 0.051 s, execution 0.042 s; the batch's RDDs are removed from the persistence list, its input-* blocks are removed from memory, and old batch metadata is deleted) ...
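
Because reduceByKey only sees the records of the current interval, the printed counts reset with every batch. The original program stops there, but as a hedged sketch of a common follow-up: Spark Streaming's updateStateByKey can carry a running total across batches (it requires a checkpoint directory). The port 8888, the 5-second interval, and the local checkpoint path below are assumptions taken from the example arguments, not values fixed in the post's code:

import org.apache.spark.SparkConf
import org.apache.spark.storage.StorageLevel
import org.apache.spark.streaming.{Seconds, StreamingContext}

// Hypothetical variant (not part of the original post): keeps cumulative counts per letter.
object StatefulNetWorkWordCount {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setAppName("StatefulNetWorkWordCount").setMaster("local[*]")
    val ssc = new StreamingContext(conf, Seconds(5))          // 5-second batches, as in the example arguments
    ssc.checkpoint("checkpoint")                               // stateful operations need checkpointing (assumed local dir)

    val lines = ssc.socketTextStream("localhost", 8888, StorageLevel.MEMORY_ONLY_SER)
    val pairs = lines.flatMap(_.split(" ")).map(word => (word, 1))

    // Add this batch's occurrences to the running total kept for each key.
    val totals = pairs.updateStateByKey[Int] { (batchCounts: Seq[Int], state: Option[Int]) =>
      Some(batchCounts.sum + state.getOrElse(0))
    }

    totals.print()
    ssc.start()
    ssc.awaitTermination()
  }
}
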
