package com.hello.hbase;

import java.nio.charset.Charset;
import java.text.SimpleDateFormat;
import java.util.Calendar;
import java.util.Date;
import java.util.List;
import java.util.Locale;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.regex.Pattern;

import org.apache.commons.lang.RandomStringUtils;
import org.apache.flume.Context;
import org.apache.flume.Event;
import org.apache.flume.FlumeException;
import org.apache.flume.conf.ComponentConfiguration;
import org.apache.flume.sink.hbase.HbaseEventSerializer;
import org.apache.hadoop.hbase.client.Increment;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Row;
import com.google.common.base.Charsets;
import com.google.common.collect.Lists;

public class FlumeHbaseEventSerializer implements HbaseEventSerializer {

// Config vars
/** Regular expression used to parse groups from event data. */
public static final String REGEX_CONFIG = "regex";
public static final String REGEX_DEFAULT = " ";
/** Whether to ignore case when performing regex matches. */
public static final String IGNORE_CASE_CONFIG = "regexIgnoreCase";
public static final boolean IGNORE_CASE_DEFAULT = false;
/** Comma separated list of column names to place matching groups in. */
public static final String COL_NAME_CONFIG = "colNames";
public static final String COLUMN_NAME_DEFAULT = "ip";
/** Index of the row key in matched regex groups */
public static final String ROW_KEY_INDEX_CONFIG = "rowKeyIndex";
/** Placeholder in colNames for row key */
public static final String ROW_KEY_NAME = "ROW_KEY";
/** Whether to deposit event headers into corresponding column qualifiers */
public static final String DEPOSIT_HEADERS_CONFIG = "depositHeaders";
public static final boolean DEPOSIT_HEADERS_DEFAULT = false;
/** What charset to use when serializing into HBase's byte arrays */
public static final String CHARSET_CONFIG = "charset";
public static final String CHARSET_DEFAULT = "UTF-8";
/*
* This is a nonce used in HBase row-keys, such that the same row-key never
* gets written more than once from within this JVM.
*/
protected static final AtomicInteger nonce = new AtomicInteger(0);
protected static String randomKey = RandomStringUtils.randomAlphanumeric(10);
protected byte[] cf;
private byte[] payload;
private List<byte[]> colNames = Lists.newArrayList();
private boolean regexIgnoreCase;
private Charset charset;
@Override
public void configure(Context context) {
String regex = context.getString(REGEX_CONFIG, REGEX_DEFAULT);
regexIgnoreCase = context.getBoolean(IGNORE_CASE_CONFIG, IGNORE_CASE_DEFAULT);
// depositHeaders is read here but never used by this serializer.
context.getBoolean(DEPOSIT_HEADERS_CONFIG, DEPOSIT_HEADERS_DEFAULT);
// Compile once to validate the configured regex; flags are combined with bitwise OR.
Pattern.compile(regex, Pattern.DOTALL | (regexIgnoreCase ? Pattern.CASE_INSENSITIVE : 0));
charset = Charset.forName(context.getString(CHARSET_CONFIG, CHARSET_DEFAULT));
// getString() returns null when "columns" is absent, so it must not be wrapped in new String().
String cols = context.getString("columns");
String colNameStr;
if (cols != null && !"".equals(cols)) {
colNameStr = cols;
} else {
colNameStr = context.getString(COL_NAME_CONFIG, COLUMN_NAME_DEFAULT);
}
String[] columnNames = colNameStr.split(",");
for (String s : columnNames) {
colNames.add(s.getBytes(charset));
}
}

@Override
public void configure(ComponentConfiguration conf) {}

@Override
public void initialize(Event event, byte[] columnFamily) {
event.getHeaders();
this.payload = event.getBody();
this.cf = columnFamily;
}

protected byte[] getRowKey(Calendar cal) {
String str = new String(payload, charset);
String tmp = str.replace("\"", "");
String[] arr = tmp.split(" ");
String log_data = arr[4];
String[] param_arr = log_data.split("&");
// Only the ip field feeds the row key; take the value after '=' so the key holds the bare address,
// matching how getActions() extracts field values.
String ip_str = param_arr[3].split("=")[1];
// String dataStr = arr[3].replace("[", "");
// String rowKey = getDate2Str(dataStr) + "-" + clientIp + "-" + nonce.getAndIncrement();
String rowKey = ip_str + "-" + nonce.getAndIncrement();
return rowKey.getBytes(charset);
}

protected byte[] getRowKey() {
return getRowKey(Calendar.getInstance());
}

@Override
public List<Row> getActions() throws FlumeException {
List<Row> actions = Lists.newArrayList();
byte[] rowKey;
String body = new String(payload, charset);
String tmp = body.replace("\"", "");
// String[] arr = tmp.split(REGEX_DEFAULT);
String[] arr = tmp.split(" ");
String log_data = arr[4];
String[] param_arr = log_data.split("&");
String userid = param_arr[0].split("=")[1];
String itemid = param_arr[1].split("=")[1];
String type = param_arr[2].split("=")[1];
String ip_str = param_arr[3].split("=")[1];
// Debug output of the parsed fields; replace with a logger in production.
System.out.println("userid=" + userid + " itemid=" + itemid + " type=" + type + " ip=" + ip_str);
try {
rowKey = getRowKey();
Put put = new Put(rowKey);
// Values are written as UTF-8 here, independent of the configured charset.
put.add(cf, colNames.get(0), userid.getBytes(Charsets.UTF_8));
put.add(cf, colNames.get(1), itemid.getBytes(Charsets.UTF_8));
put.add(cf, colNames.get(2), type.getBytes(Charsets.UTF_8));
put.add(cf, colNames.get(3), ip_str.getBytes(Charsets.UTF_8));
actions.add(put);
} catch (Exception e) {
throw new FlumeException("Could not get row key!", e);
}
return actions;
}

@Override
public List<Increment> getIncrements() {
return Lists.newArrayList();
}

@Override
public void close() {}

/** Converts access-log timestamps such as "10/Oct/2016:13:55:36" to "yyyy-MM-dd-HH:mm:ss". */
public static String getDate2Str(String dataStr) {
try {
// HH (24-hour clock) matches access-log timestamps; the original pattern's hh is the 12-hour clock.
SimpleDateFormat formatter = new SimpleDateFormat("dd/MMM/yyyy:HH:mm:ss", Locale.ENGLISH);
Date date = formatter.parse(dataStr);
SimpleDateFormat format = new SimpleDateFormat("yyyy-MM-dd-HH:mm:ss");
return format.format(date);
} catch (Exception e) {
// Formatting inside the try avoids the NPE the original risked when parsing failed.
e.printStackTrace();
return dataStr;
}
}
}
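
To plug the serializer into a Flume agent, the HBase sink is pointed at the class and the column list is passed through the serializer.* namespace, which Flume hands to configure() with the prefix stripped (so serializer.columns arrives as the "columns" key). A minimal sketch of the sink section of the agent properties file; the agent, sink, channel, table, and column-family names (agent1, hbaseSink, memoryChannel, user_action, info) are illustrative, not taken from the original post:

agent1.sinks.hbaseSink.type = hbase
agent1.sinks.hbaseSink.table = user_action
agent1.sinks.hbaseSink.columnFamily = info
agent1.sinks.hbaseSink.serializer = com.hello.hbase.FlumeHbaseEventSerializer
agent1.sinks.hbaseSink.serializer.columns = userid,itemid,type,ip
agent1.sinks.hbaseSink.channel = memoryChannel

Because getActions() writes exactly four qualifiers in a fixed order, serializer.columns must list four names in that order: user id, item id, type, and ip.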
