过程:

Spout 发送msgid 1-10

一级Bolt, msgid1的tuple作为基本组合tuple, 其他8个和它组合, 然后发送给二级Bolt, 同时单个msgid对应的tuple都ack一次; msgid1对应的tuple, acker将会跟踪8个二级bolt的处理情况.

二级Bolt,发送ack fail(模拟处理失败)

结果: 在 spout 的 fail 回调中出现 msgid 1-9 全部失败的情况.

拓扑代码

package storm.starter;

import backtype.storm.Config;
import backtype.storm.LocalCluster;
import backtype.storm.LocalDRPC;
import backtype.storm.StormSubmitter;
import backtype.storm.drpc.DRPCSpout;
import backtype.storm.task.OutputCollector;
import backtype.storm.task.ShellBolt;
import backtype.storm.task.TopologyContext;
import backtype.storm.topology.BasicOutputCollector;
import backtype.storm.topology.IRichBolt;
import backtype.storm.topology.OutputFieldsDeclarer;
import backtype.storm.topology.TopologyBuilder;
import backtype.storm.topology.base.BaseBasicBolt;
import backtype.storm.tuple.Fields;
import backtype.storm.tuple.Tuple;
import backtype.storm.tuple.Values;
import storm.starter.spout.RandomSentenceSpout; import java.lang.management.ManagementFactory;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map; import org.apache.log4j.Logger;
import org.apache.log4j.PropertyConfigurator; /**
* This topology demonstrates Storm's stream groupings and multilang
* capabilities.
*/
public class WordCountTopology {
public static String GetThreadName() {
Thread thread = Thread.currentThread();
return thread.getName();
} public static final Logger logger = Logger
.getLogger(WordCountTopology.class); // 切分单词 一级bolt
/*
* public static class SplitSentence extends ShellBolt implements IRichBolt
* { public SplitSentence() { super("python", "splitsentence.py");
* logger.error(GetThreadName() + "SplitSentence create"); }
*
* // 定义字段发送
*
* @Override public void declareOutputFields(OutputFieldsDeclarer declarer)
* { declarer.declare(new Fields("word")); logger.error(GetThreadName() +
* "declarer.declare(new Fields(word))"); }
*
* @Override public Map<String, Object> getComponentConfiguration() {
* logger.error("getComponentConfiguration"); return null; } }
*/
public static class SplitSentence implements IRichBolt {
private OutputCollector _collector; int num = 0;
@Override
public void prepare(Map stormConf, TopologyContext context,
OutputCollector collector) {
_collector = collector;
} private Tuple tuple1;
@Override
public void execute(Tuple tuple) {
String sentence = tuple.getString(0);
if(sentence.equals("a")) {
tuple1 = tuple;
}
else{
List<Tuple> anchors = new ArrayList<Tuple>();
anchors.add(tuple1);
anchors.add(tuple);
_collector.emit(anchors, new Values(sentence + "a"));
_collector.ack(tuple);
_collector.ack(tuple1);
} // for (String word : sentence.split(" ")){
// _collector.emit(tuple, new Values(word));
// }
// num++; System.out.println("Bolt Thread " + Thread.currentThread().getName() + "recve : " + sentence);
System.out.println( num + " bolt recev:" + tuple.getMessageId().getAnchorsToIds());
} @Override
public void cleanup() {
} @Override
public void declareOutputFields(OutputFieldsDeclarer declarer) {
declarer.declare(new Fields("word"));
} @Override
public Map<String, Object> getComponentConfiguration() {
// TODO Auto-generated method stub
return null;
}
} public static class CountCount1 implements IRichBolt {
Map<String, Integer> counts = new HashMap<String, Integer>();
private OutputCollector _collector;
int num = 0; @Override
public void execute(Tuple tuple) {
String word = tuple.getString(0);
//logger.error(this.toString() + "word = " + word);
Integer count = counts.get(word);
if (count == null)
count = 0; count++;
counts.put(word, count);
num++; _collector.fail(tuple);
//_collector.ack(tuple); //_collector.emit(tuple, new Values(word, count));
} @Override
public void declareOutputFields(OutputFieldsDeclarer declarer) {
// logger.error("declareOutputFields :");
declarer.declare(new Fields("result", "count"));
} @Override
public void prepare(Map stormConf, TopologyContext context,
OutputCollector collector) {
// TODO Auto-generated method stub
_collector = collector;
} @Override
public void cleanup() {
// TODO Auto-generated method stub } @Override
public Map<String, Object> getComponentConfiguration() {
// TODO Auto-generated method stub
return null;
}
} public static class WordCount extends BaseBasicBolt {
private OutputCollector _collector;
Map<String, Integer> counts = new HashMap<String, Integer>(); @Override
public void execute(Tuple tuple, BasicOutputCollector collector) {
String word = tuple.getString(0);
//logger.error(this.toString() + "word = " + word);
Integer count = counts.get(word);
if (count == null)
count = 0;
count++;
counts.put(word, count); // <key, list<value, count> >
//logger.error(this.toString() + "count = " + count);
collector.emit(new Values(word, count));
} @Override
public void declareOutputFields(OutputFieldsDeclarer declarer) {
// logger.error("declareOutputFields :");
declarer.declare(new Fields("result", "count"));
}
} public static class WordCount1 extends BaseBasicBolt {
Map<String, Integer> counts = new HashMap<String, Integer>(); @Override
public void execute(Tuple tuple, BasicOutputCollector collector) {
// logger.error("WordCount1");
// tuple.getFields()[0];
if (tuple.getFields().contains("result")) {
String count = (String) tuple.getValueByField("result");
// tuple.getValueByField(field)
long countl = -0;// = Long.valueOf(count);
// logger.error(this.toString() + " key = resultkey " + count);
} if (tuple.getFields().contains("count")) {
Integer count = (Integer) tuple.getValueByField("count");
// tuple.getValueByField(field)
long countl = -0;// = Long.valueOf(count);
//logger.error(this.toString() + " key = count " + count);
} // String word = tuple.getString(0);
// logger.error(this.toString() +"word = " + word);
// Integer count = counts.get(word);
// if (count == null)
// count = 0;
// count++;
// counts.put(word, count);
// logger.error(this.toString() + "count = " + count);
// collector.emit(new Values(word, count));
} @Override
public void declareOutputFields(OutputFieldsDeclarer declarer) {
// logger.error("declareOutputFields :");
declarer.declare(new Fields("word1", "count1"));
}
} public static void main(String[] args) throws Exception {
TopologyBuilder builder = new TopologyBuilder(); PropertyConfigurator
.configure("/home/hadoop/code1/Kafka/src/Log4j.properties"); // parallelism_hint 代表是executor数量, setNumTasks 代表Tasks数量
builder.setSpout("spout", new RandomSentenceSpout(), 5).setNumTasks(2); builder.setBolt("split", new SplitSentence(), 8).setNumTasks(1).shuffleGrouping("spout");
builder.setBolt("count", new CountCount1(), 12).fieldsGrouping("split",
new Fields("word"));
// builder.setBolt("WordCount1", new WordCount1(), 1).fieldsGrouping(
// "count", new Fields("result", "count")); Config conf = new Config();
conf.setDebug(true);
                //  这个设置一个spout task上面最多有多少个没有处理(ack/fail)的tuple,防止tuple队列过大, 只对可靠任务起作用
conf.setMaxSpoutPending(2);
conf.setMessageTimeoutSecs(5); // 消息处理延时
conf.setNumAckers(2); // 消息处理acker System.out.println("args.length = " + args.length);
if (args != null && args.length > 0) {
conf.setNumWorkers(5); // 设置工作进程
StormSubmitter.submitTopology(args[0], conf,
builder.createTopology());
} else {
// 每个组件的最大executor数
conf.setMaxTaskParallelism(1);
conf.setDebug(true); LocalCluster cluster = new LocalCluster();
cluster.submitTopology("word-count", conf, builder.createTopology()); String str = "testdrpc";
// drpc.execute("testdrpc", str); Thread.sleep(1088000);
cluster.shutdown();
}
}
}

spout代码

package storm.starter.spout;

import backtype.storm.spout.SpoutOutputCollector;
import backtype.storm.task.TopologyContext;
import backtype.storm.topology.OutputFieldsDeclarer;
import backtype.storm.topology.base.BaseRichSpout;
import backtype.storm.tuple.Fields;
import backtype.storm.tuple.Values;
import backtype.storm.utils.Utils; import java.util.Map;
import java.util.Random; import org.apache.log4j.Logger; import storm.starter.WordCountTopology; // IRichSpout
public class RandomSentenceSpout extends BaseRichSpout {
SpoutOutputCollector _collector;
Random _rand; public static final Logger logger = Logger
.getLogger(RandomSentenceSpout.class); @Override
public void open(Map conf, TopologyContext context,
SpoutOutputCollector collector) {
_collector = collector;
_rand = new Random(); WordCountTopology.logger.error(this.toString()
+ "RandomSentenceSpout is create");
} private int num = 0; private String gettmstr() {
StringBuilder tmp = new StringBuilder();
for (int i = 0; i <= num; i++)
tmp.append("a");
num++;
return tmp.toString();
} @Override
public void nextTuple() {
Utils.sleep(200);
// String[] sentences = new String[]{ "the cow jumped over the moon",
// "an apple a day keeps the doctor away",
// "four score and seven years ago", "snow white and the seven dwarfs",
// "i am at two with nature" };
String[] sentences = new String[] { "A" }; String sentence = gettmstr(); // sentences[_rand.nextInt(sentences.length)];
if (num < 10) {
_collector.emit(new Values(sentence), new Integer(num));
// logger.error(this.toString() + "send sentence = " + sentence);
// System.out.println(Thread.currentThread().getName() + " Spout ");
}
} @Override
public void ack(Object id) {
logger.error(this.toString() + "spout ack =" + (Integer)id);
} @Override
public void fail(Object id) {
logger.error("spout fail =" + (Integer)id);
} @Override
public void declareOutputFields(OutputFieldsDeclarer declarer) {
declarer.declare(new Fields("word"));
} }

运行结果

2014-10-03 21:17:31,149 ERROR (storm.starter.spout.RandomSentenceSpout:71) - spout fail =1
2014-10-03 21:17:31,351 ERROR (storm.starter.spout.RandomSentenceSpout:71) - spout fail =2
Bolt Thread Thread-22recve : aaa
0 bolt recev:{-3139141336114052337=7131499433188364504}
2014-10-03 21:17:31,552 ERROR (storm.starter.spout.RandomSentenceSpout:71) - spout fail =3
Bolt Thread Thread-22recve : aaaa
0 bolt recev:{-4497680640148241887=-615828348570847097}
2014-10-03 21:17:31,754 ERROR (storm.starter.spout.RandomSentenceSpout:71) - spout fail =4
Bolt Thread Thread-22recve : aaaaa
0 bolt recev:{-8878772617767839827=-7708082520013359311}
2014-10-03 21:17:31,957 ERROR (storm.starter.spout.RandomSentenceSpout:71) - spout fail =5
Bolt Thread Thread-22recve : aaaaaa
0 bolt recev:{-3995020874692495577=-5070846720162762196}
2014-10-03 21:17:32,160 ERROR (storm.starter.spout.RandomSentenceSpout:71) - spout fail =6
Bolt Thread Thread-22recve : aaaaaaa
0 bolt recev:{-5994700617723404155=-3738685841476816404}
2014-10-03 21:17:32,362 ERROR (storm.starter.spout.RandomSentenceSpout:71) - spout fail =7
Bolt Thread Thread-22recve : aaaaaaaa
0 bolt recev:{-2308734827213127682=-5719708045753233056}
2014-10-03 21:17:32,563 ERROR (storm.starter.spout.RandomSentenceSpout:71) - spout fail =8
Bolt Thread Thread-22recve : aaaaaaaaa
0 bolt recev:{-3718844156917119468=-6359724009048981605}
2014-10-03 21:17:32,766 ERROR (storm.starter.spout.RandomSentenceSpout:71) - spout fail =9

测试Storm的多源头锚定的更多相关文章

  1. 测试storm异常信息(时时更新)

    Exception in thread "main" java.lang.RuntimeException: org.apache.thrift7.protocol.TProtoc ...

  2. 安装storm的一些很乱的笔记

    下面是自己安装和测试storm的一些笔记,比较乱,后续有时间在整理一篇. storm jar all-my-code.jar com.storm.MyTopology arg1 arg2这个命令会运行 ...

  3. Storm on Yarn :原理分析+平台搭建

    Storm on YARN: Storm on YARN被视为大规模Web应用与传统企业应用之间的桥梁.它将Storm事件处理平台与YARN(Yet Another Resource Negotiat ...

  4. storm源码之storm代码结构【译】【转】

    [原]storm源码之storm代码结构[译]  说明:本文翻译自Storm在GitHub上的官方Wiki中提供的Storm代码结构描述一节Structure of the codebase,希望对正 ...

  5. 【原】storm源码之storm代码结构【译】

    说明:本文翻译自Storm在GitHub上的官方Wiki中提供的Storm代码结构描述一节Structure of the codebase,希望对正在基于Storm进行源码级学习和研究的朋友有所帮助 ...

  6. storm源码之storm代码结构【译】

    storm源码之storm代码结构[译] 说明:本文翻译自Storm在GitHub上的官方Wiki中提供的Storm代码结构描述一节Structure of the codebase,希望对正在基于S ...

  7. Storm的部署

    配置方案如下 node1 Nimbus zookeeper node2 Supervisor zookeeper node3 Supervisor zookeeper node4 Supervisor ...

  8. Storm开发——环境配置部署

    配置开发环境:http://storm.apache.org/releases/current/Setting-up-development-environment.html 开发环境定义: Stor ...

  9. flume+kafka+storm+mysql架构设计

    前段时间学习了storm,最近刚开blog,就把这些资料放上来供大家参考. 这个框架用的组件基本都是最新稳定版本,flume-ng1.4+kafka0.8+storm0.9+mysql (项目是mav ...

随机推荐

  1. SVN创建资源库和远程连接配置

    SVN创建资源库和远程连接配置 本机安装的是TortoiseSVN-1.7.5.22551-win32-svn-1.7.3.msi 安装好后会在鼠标右键中出现如图最后两项的选项: 创建svn资源库: ...

  2. [shell基础]——echo命令

    echo命令:在shell中主要用于输出 1. -n     不换行的显示结果(默认是换行的) 2. -e " "  支持双引号中使用一些特殊字符 常用的特殊字符有 \a 发出警告 ...

  3. 注解实现AOP

    package com.dch.service.aop; import java.text.SimpleDateFormat; import java.util.Arrays; import java ...

  4. Lucene 初识

    因为业务需要,虽然自己不是专门写搜索的,但是需要自己拼一些搜索条件去调用搜索的接口,而之前看的JVM crash里也涉及到了Lucene,所以大概了解一下. 参考文档: http://www.itey ...

  5. html相对定位绝对定位

    孔子说:“温故而知新,可以为师矣.”这几天参加了一个免费的前端课,每天晚上都有直播,讲解一个独立的案例.在听前端基础的时候,发现自己有不少东西没学会,平时在学校虽说html也写了不少,但有好大一部分都 ...

  6. html5 嵌入元素 img map areaiframe embed meter object meter

    <img src="路径">            src 路径可以是img.jpg 也可以是 绝对和相对路径+img.jpg <img src="路径 ...

  7. String字符串操作

    char chars[] ={'a','b','c'}; String s = new String(chars); int len = s.length();//字符串长度 System.out.p ...

  8. Stage3--Python控制流程及函数

    说在前面: Stage1-Stage4简单介绍一下Python语法,Stage5开始用python实现一些实际应用,语法的东西到处可以查看到,学习一门程序语言的最终目的是应用,而不是学习语法,语法本事 ...

  9. 《ArcGIS Runtime SDK for Android开发笔记》——问题集:Error:Error: File path too long on Windows, keep below 240 characters

    1.前言 在使用Android Studio开发环境时,经常会爆出以下错误,虽然具体细节内容各有不同,但是说明的都是同一个问题,在windows中使用过长的路径,超过240字符. Error:Erro ...

  10. Enumeration 接口的使用

          Enumeration是一个接口,定义了两个规则,可以获取连续的数据,对数据结构非常重要.       接口源码: publicinterfaceEnumeration<E>{ ...