flumeng-kafka-plugin
GitHub reference: https://github.com/beyondj2ee/flumeng-kafka-plugin/tree/master/flumeng-kafka-plugin
/*
 * Copyright (c) 2013.09.06 BeyondJ2EE.
 * All rights reserved.
 * http://beyondj2ee.github.com
 * This software is the confidential and proprietary information of BeyondJ2EE,
 * Inc. You shall not disclose such Confidential Information and shall use it
 * only in accordance with the terms of the license agreement you entered into
 * with BeyondJ2EE.
 *
 * Revision History
 * Author            Date             Description
 * ===============   ================ ======================================
 * beyondj2ee
 *
 */
package org.apache.flume.plugins;
/**
* KAFKA Flume Sink (Kafka 0.8 Beta, Flume 1.4).
* User: beyondj2ee
* Date: 13. 9. 4
* Time: PM 4:32
*/
import java.util.Properties;

import kafka.javaapi.producer.Producer;
import kafka.producer.KeyedMessage;
import kafka.producer.ProducerConfig;

import org.apache.commons.lang.StringUtils;
import org.apache.flume.*;
import org.apache.flume.conf.Configurable;
import org.apache.flume.sink.AbstractSink;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableMap;
/**
 * Kafka sink.
 */
public class KafkaSink extends AbstractSink implements Configurable {

    // - [ constant fields ] ----------------------------------------
    /**
     * The constant logger.
     */
    private static final Logger LOGGER = LoggerFactory
            .getLogger(KafkaSink.class);

    // - [ variable fields ] ----------------------------------------
    /**
     * The Parameters.
     */
    private Properties parameters;
    /**
     * The Producer.
     */
    private Producer<String, String> producer;
    /**
     * The Context.
     */
    private Context context;
    /**
     * Countdown counter used to log a progress message once every 100 sends.
     */
    private int i = 100;
    // - [ interface methods ] ------------------------------------
    /**
     * Configure void.
     *
     * @param context
     *            the context
     */
    @Override
    public void configure(Context context) {
        this.context = context;
        ImmutableMap<String, String> props = context.getParameters();

        parameters = new Properties();
        for (String key : props.keySet()) {
            String value = props.get(key);
            this.parameters.put(key, value);
            LOGGER.info("key is " + key + " value is " + value);
        }
    }
    /**
     * Start void.
     */
    @Override
    public synchronized void start() {
        super.start();
        ProducerConfig config = new ProducerConfig(this.parameters);
        this.producer = new Producer<String, String>(config);
    }
    /**
     * Process status.
     *
     * @return the status
     * @throws EventDeliveryException
     *             the event delivery exception
     */
    @Override
    public Status process() throws EventDeliveryException {
        Status status = null;

        // Start transaction
        Channel ch = getChannel();
        Transaction txn = ch.getTransaction();
        txn.begin();
        try {
            // This try clause includes whatever Channel operations you want to do
            Event event = ch.take();
            if (event == null) {
                // The channel is empty; commit the empty transaction and back off
                // instead of dereferencing a null event.
                txn.commit();
                return Status.BACKOFF;
            }

            String partitionKey = (String) parameters
                    .get(KafkaFlumeConstans.PARTITION_KEY_NAME);
            String encoding = StringUtils.defaultIfEmpty(
                    (String) this.parameters
                            .get(KafkaFlumeConstans.ENCODING_KEY_NAME),
                    KafkaFlumeConstans.DEFAULT_ENCODING);
            String topic = Preconditions.checkNotNull(
                    (String) this.parameters
                            .get(KafkaFlumeConstans.CUSTOME_TOPIC_KEY_NAME),
                    "custom.topic.name is required");

            String eventData = new String(event.getBody(), encoding);

            KeyedMessage<String, String> data;
            // if the partition key doesn't exist, let Kafka choose the partition
            if (StringUtils.isEmpty(partitionKey)) {
                data = new KeyedMessage<String, String>(topic, eventData);
            } else {
                data = new KeyedMessage<String, String>(topic, partitionKey,
                        eventData);
            }

            // log a progress message once every 100 sends
            if (i == 0) {
                LOGGER.info("100 messages sent");
                i = 100;
            }
            i = i - 1;

            producer.send(data);
            txn.commit();
            status = Status.READY;
        } catch (Throwable t) {
            txn.rollback();
            status = Status.BACKOFF;
            // log the failure instead of silently swallowing non-Error throwables
            LOGGER.error("send data error", t);
            // re-throw all Errors
            if (t instanceof Error) {
                throw (Error) t;
            }
        } finally {
            txn.close();
        }
        return status;
    }
    /**
     * Stop void.
     */
    @Override
    public void stop() {
        // guard against stop() being called before start() has created the producer
        if (producer != null) {
            producer.close();
        }
        super.stop();
    }
    // - [ protected methods ] --------------------------------------
    // - [ public methods ] -----------------------------------------
    // - [ private methods ] ----------------------------------------
    // - [ static methods ] -----------------------------------------
    // - [ getter/setter methods ] ----------------------------------
    // - [ main methods ] -------------------------------------------
}
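The KafkaFlumeConstans class referenced above is not included in this listing. From the keys the sink reads, a minimal sketch could look like the following. Only "custom.topic.name" is confirmed by the Preconditions error message in process(); the other key strings are assumptions and should be verified against the plugin repository.

package org.apache.flume.plugins;

/**
 * Minimal sketch of the constants class used by KafkaSink.
 * NOTE: only CUSTOME_TOPIC_KEY_NAME's value is confirmed by the
 * "custom.topic.name is required" message in process(); the other
 * values are assumed.
 */
public class KafkaFlumeConstans {
    /** Context key naming the target Kafka topic (required). */
    public static final String CUSTOME_TOPIC_KEY_NAME = "custom.topic.name";
    /** Context key for an optional fixed partition key (assumed value). */
    public static final String PARTITION_KEY_NAME = "custom.partition.key";
    /** Context key overriding the event-body encoding (assumed value). */
    public static final String ENCODING_KEY_NAME = "custom.encoding";
    /** Encoding applied when none is configured. */
    public static final String DEFAULT_ENCODING = "UTF-8";
}

Because configure() copies every key from the sink's Flume context into the Properties object handed to ProducerConfig, the agent configuration supplies both the custom.* keys and the standard Kafka 0.8 producer settings in one place. A sketch of such a configuration follows; the agent, channel, and sink names, the topic, and the broker addresses are illustrative assumptions.

# Illustrative Flume agent configuration (names and addresses assumed).
agent.sinks = sink1
agent.sinks.sink1.type = org.apache.flume.plugins.KafkaSink
agent.sinks.sink1.channel = ch1

# Keys read by KafkaSink itself; custom.topic.name is required.
agent.sinks.sink1.custom.topic.name = test-topic
agent.sinks.sink1.custom.encoding = UTF-8

# Passed straight through to the Kafka 0.8 producer.
agent.sinks.sink1.metadata.broker.list = broker1:9092,broker2:9092
agent.sinks.sink1.serializer.class = kafka.serializer.StringEncoder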