Goal: filter the messages read from Kafka, append a specified field to each tuple, and print the result.

SentenceSpout:

package Trident;

import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;

import org.apache.storm.spout.SpoutOutputCollector;
import org.apache.storm.task.TopologyContext;
import org.apache.storm.topology.OutputFieldsDeclarer;
import org.apache.storm.topology.base.BaseRichSpout;
import org.apache.storm.tuple.Fields;
import org.apache.storm.tuple.Values;

import kafka.consumer.Consumer;
import kafka.consumer.ConsumerConfig;
import kafka.consumer.ConsumerIterator;
import kafka.consumer.KafkaStream;
import kafka.javaapi.consumer.ConsumerConnector;
import kafka.serializer.StringDecoder;
import kafka.utils.VerifiableProperties;

/**
 * Reads messages from Kafka and emits them as tuples.
 * @author BFD-593
 */
public class SentenceSpout extends BaseRichSpout {
    private SpoutOutputCollector collector;
    private ConsumerConnector consumer;
    private ConsumerIterator<String, String> it;

    @Override
    public void nextTuple() {
        // hasNext() blocks until a message arrives; emit one tuple per message
        if (it.hasNext()) {
            // append two placeholder columns so every tuple has three fields
            String string = it.next().message() + " 1" + " 2";
            String[] parts = string.split(" ");
            this.collector.emit(new Values(parts[0], parts[1], parts[2]));
        }
    }

    @Override
    public void open(Map map, TopologyContext context, SpoutOutputCollector collector) {
        this.collector = collector;
        Properties props = new Properties();
        // ZooKeeper connection
        props.put("zookeeper.connect", "192.168.170.185:2181");
        // consumer group
        props.put("group.id", "testgroup");
        // ZooKeeper timeouts
        props.put("zookeeper.session.timeout.ms", "4000");
        props.put("zookeeper.sync.time.ms", "200");
        props.put("auto.commit.interval.ms", "1000");
        props.put("auto.offset.reset", "smallest");
        // serializer class
        props.put("serializer.class", "kafka.serializer.StringEncoder");
        ConsumerConfig config = new ConsumerConfig(props);
        this.consumer = Consumer.createJavaConsumerConnector(config);

        // Create the message stream once here: createMessageStreams may only be
        // called a single time per consumer connector, so it must not sit in nextTuple().
        Map<String, Integer> topicCountMap = new HashMap<String, Integer>();
        topicCountMap.put("helloworld", Integer.valueOf(1));
        StringDecoder keyDecoder = new StringDecoder(new VerifiableProperties());
        StringDecoder valueDecoder = new StringDecoder(new VerifiableProperties());
        Map<String, List<KafkaStream<String, String>>> consumerMap =
                consumer.createMessageStreams(topicCountMap, keyDecoder, valueDecoder);
        KafkaStream<String, String> stream = consumerMap.get("helloworld").get(0);
        this.it = stream.iterator();
    }

    @Override
    public void declareOutputFields(OutputFieldsDeclarer declarer) {
        declarer.declare(new Fields("name", "sentence", "sentence2"));
    }
}
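
To feed the spout during a local test, here is a minimal producer sketch for the "helloworld" topic using the same Kafka 0.8-era API as the consumer above; the broker address and port are assumptions and should match your cluster:

package Trident;

import java.util.Properties;

import kafka.javaapi.producer.Producer;
import kafka.producer.KeyedMessage;
import kafka.producer.ProducerConfig;

public class TestProducer {
    public static void main(String[] args) {
        Properties props = new Properties();
        // assumed broker address; replace with your own broker list
        props.put("metadata.broker.list", "192.168.170.185:9092");
        props.put("serializer.class", "kafka.serializer.StringEncoder");
        Producer<String, String> producer =
                new Producer<String, String>(new ProducerConfig(props));
        // each message becomes one spout tuple; the spout appends " 1 2" itself
        producer.send(new KeyedMessage<String, String>("helloworld", "a"));
        producer.send(new KeyedMessage<String, String>("helloworld", "b"));
        producer.close();
    }
}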

FunctionBolt:

package Trident;

import org.apache.storm.trident.operation.BaseFunction;
import org.apache.storm.trident.operation.TridentCollector;
import org.apache.storm.trident.tuple.TridentTuple;
import org.apache.storm.tuple.Values;

/**
 * Trident function: appends a "gender" field to the tuples emitted by the spout.
 * The emitted value is added to the tuple; the original fields are not replaced.
 * @author BFD-593
 */
public class FunctionBolt extends BaseFunction {
    @Override
    public void execute(TridentTuple tuple, TridentCollector collector) {
        String str = tuple.getStringByField("name");
        if ("a".equals(str)) {
            collector.emit(new Values("男"));
        } else {
            collector.emit(new Values("女"));
        }
    }
}
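
Because a Trident function appends its emitted values to the input tuple instead of replacing it, each tuple grows by one field here. A sketch of the transformation, assuming the spout emitted ("a", "1", "2"):

// before FunctionBolt: [name="a", sentence="1", sentence2="2"]
// after FunctionBolt:  [name="a", sentence="1", sentence2="2", gender="男"]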

MyFilter:

package Trident;

import java.util.Map;

import org.apache.storm.trident.operation.BaseFilter;
import org.apache.storm.trident.operation.TridentOperationContext;
import org.apache.storm.trident.tuple.TridentTuple;

/**
 * Trident filter: drops the tuples whose "name" field is "a" and whose
 * "sentence" field is "b"; every other tuple is kept.
 * @author BFD-593
 */
public class MyFilter extends BaseFilter {
    private TridentOperationContext context;

    @Override
    public void prepare(Map conf, TridentOperationContext context) {
        super.prepare(conf, context);
        this.context = context;
    }

    @Override
    public boolean isKeep(TridentTuple tuple) {
        String name = tuple.getStringByField("name");
        String value = tuple.getStringByField("sentence");
        // keep the tuple unless both fields match
        return (!"a".equals(name)) || (!"b".equals(value));
    }
}
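
By De Morgan's law, the return expression is equivalent to !("a".equals(name) && "b".equals(value)), so a tuple is dropped only when both fields match:

// name="a", sentence="b"  ->  isKeep = false (dropped)
// name="a", sentence="1"  ->  isKeep = true  (kept)
// name="b", sentence="b"  ->  isKeep = true  (kept)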

PrintFilter:

package Trident;

import java.util.Iterator;
import java.util.Map;

import org.apache.storm.trident.operation.BaseFilter;
import org.apache.storm.trident.operation.TridentOperationContext;
import org.apache.storm.trident.tuple.TridentTuple;
import org.apache.storm.tuple.Fields;

/**
 * Pass-through filter that prints every field name and value of each tuple.
 * @author BFD-593
 */
public class PrintFilter extends BaseFilter {
    private TridentOperationContext context = null;

    @Override
    public void prepare(Map conf, TridentOperationContext context) {
        super.prepare(conf, context);
        this.context = context;
    }

    @Override
    public boolean isKeep(TridentTuple tuple) {
        Fields fields = tuple.getFields();
        Iterator<String> iterator = fields.iterator();
        StringBuilder str = new StringBuilder();
        while (iterator.hasNext()) {
            String next = iterator.next();
            Object value = tuple.getValueByField(next);
            str.append(next).append(":").append(value).append(",");
        }
        System.out.println(str);
        return true;
    }
}

TopologyTrident:

package Trident;

import org.apache.kafka.common.utils.Utils;
import org.apache.storm.Config;
import org.apache.storm.LocalCluster;
import org.apache.storm.trident.TridentTopology;
import org.apache.storm.tuple.Fields;

/**
 * Wires the spout, the Trident function, and the Trident filters into a topology.
 * @author BFD-593
 */
public class TopologyTrident {
    public static void main(String[] args) {
        SentenceSpout spout = new SentenceSpout();
        TridentTopology topology = new TridentTopology();
        topology.newStream("spout", spout)
                .each(new Fields("name"), new FunctionBolt(), new Fields("gender"))
                .each(new Fields("name", "sentence"), new MyFilter())
                .each(new Fields("name", "sentence", "sentence2", "gender"), new PrintFilter());

        Config conf = new Config();
        LocalCluster clu = new LocalCluster();
        clu.submitTopology("mytopology", conf, topology.build());
        Utils.sleep(100000000);
        clu.killTopology("mytopology");
        clu.shutdown();
    }
}
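
Tracing one message through the topology: for a Kafka message "a", the spout appends " 1 2" and emits ("a", "1", "2"); FunctionBolt adds gender "男"; MyFilter keeps the tuple because the sentence field is "1" rather than "b"; PrintFilter then prints a line of the following form (an illustration derived from the code above, not captured output):

name:a,sentence:1,sentence2:2,gender:男,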

  

