Flume 1.6 + Elasticsearch 6.3.2: a custom Flume sink that merges multi-line log events and bulk-writes them to Elasticsearch

pom.xml

<dependencies>
    <dependency>
        <groupId>junit</groupId>
        <artifactId>junit</artifactId>
        <!-- NOTE: the patch versions in this pom are assumed; adjust to the versions you actually use -->
        <version>3.8.1</version>
        <scope>test</scope>
    </dependency>
    <!-- https://mvnrepository.com/artifact/org.elasticsearch/elasticsearch -->
    <dependency>
        <groupId>org.elasticsearch</groupId>
        <artifactId>elasticsearch</artifactId>
        <version>6.4.0</version>
    </dependency>
    <!-- https://mvnrepository.com/artifact/org.elasticsearch.client/transport -->
    <dependency>
        <groupId>org.elasticsearch.client</groupId>
        <artifactId>transport</artifactId>
        <version>6.4.0</version>
    </dependency>
    <!--
    <dependency>
        <groupId>io.netty</groupId>
        <artifactId>netty-all</artifactId>
        <version>4.1.x.Final</version>
    </dependency>
    -->
    <!-- https://mvnrepository.com/artifact/org.apache.flume.flume-ng-sinks/flume-ng-elasticsearch-sink -->
    <dependency>
        <groupId>org.apache.flume.flume-ng-sinks</groupId>
        <artifactId>flume-ng-elasticsearch-sink</artifactId>
        <version>1.6.0</version>
    </dependency>
    <!-- https://mvnrepository.com/artifact/com.google.code.gson/gson -->
    <dependency>
        <groupId>com.google.code.gson</groupId>
        <artifactId>gson</artifactId>
        <version>2.8.5</version>
    </dependency>
</dependencies>
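
To make the custom sink visible to Flume, the compiled class and its Elasticsearch client dependencies have to end up on Flume's classpath, typically under $FLUME_HOME/lib. One way to do that (a sketch, not part of the original post) is a shaded jar; the maven-shade-plugin section below would sit next to <dependencies> in the same pom.xml, and the plugin version is only an assumption:

<build>
    <plugins>
        <!-- assumption: bundle the sink and its dependencies into one jar, then copy it into $FLUME_HOME/lib -->
        <plugin>
            <groupId>org.apache.maven.plugins</groupId>
            <artifactId>maven-shade-plugin</artifactId>
            <version>3.1.0</version>
            <executions>
                <execution>
                    <phase>package</phase>
                    <goals>
                        <goal>shade</goal>
                    </goals>
                </execution>
            </executions>
        </plugin>
    </plugins>
</build>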

ElasticSearchForLogSink.java

package com.jachs.sink.elasticsearch;

import static org.apache.flume.sink.elasticsearch.ElasticSearchSinkConstants.CLUSTER_NAME;
import static org.apache.flume.sink.elasticsearch.ElasticSearchSinkConstants.HOSTNAMES;
import static org.apache.flume.sink.elasticsearch.ElasticSearchSinkConstants.INDEX_NAME;

import java.net.InetAddress;
import java.net.UnknownHostException;
import java.util.HashMap;
import java.util.Map;

import org.apache.flume.Channel;
import org.apache.flume.Context;
import org.apache.flume.Event;
import org.apache.flume.EventDeliveryException;
import org.apache.flume.Transaction;
import org.apache.flume.conf.Configurable;
import org.apache.flume.sink.AbstractSink;
import org.elasticsearch.action.bulk.BulkRequestBuilder;
import org.elasticsearch.client.transport.TransportClient;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.TransportAddress;
import org.elasticsearch.transport.client.PreBuiltTransportClient;

public class ElasticSearchForLogSink extends AbstractSink implements Configurable {
    private String hostNames;
    private String indexName;
    private String clusterName;

    static TransportClient client;
    // buffers merged lines per token until enough have accumulated for one bulk request
    static Map<String, String> dataMap = new HashMap<String, String>();

    public void configure(Context context) {
        hostNames = context.getString(HOSTNAMES);
        indexName = context.getString(INDEX_NAME);
        clusterName = context.getString(CLUSTER_NAME);
    }

    @Override
    public void start() {
        Settings settings = Settings.builder().put("cluster.name", clusterName).build();
        try {
            // hostNames is expected in host:port form, e.g. 127.0.0.1:9300
            client = new PreBuiltTransportClient(settings).addTransportAddress(new TransportAddress(
                    InetAddress.getByName(hostNames.split(":")[0]), Integer.parseInt(hostNames.split(":")[1])));
        } catch (UnknownHostException e) {
            e.printStackTrace();
        }
    }

    @Override
    public void stop() {
        super.stop();
    }

    public Status process() throws EventDeliveryException {
        Status status = Status.BACKOFF;
        Channel ch = getChannel();
        Transaction txn = ch.getTransaction();
        txn.begin();
        try {
            Event event = ch.take();
            if (event == null) {
                txn.rollback();
                return status;
            }
            String data = new String(event.getBody(), "UTF-8");
            if (data.indexOf("token") != -1) {
                // the token length is assumed to be the last character; adjust to the real log format
                String token = data.substring(data.length() - 1, data.length());
                System.out.println("token id: " + token);
                String sb = dataMap.get(token);
                if (sb != null) {
                    dataMap.put(token, sb + data); // merge lines that share the same token
                } else {
                    dataMap.put(token, data);
                }
            }
            System.out.println("buffered: " + dataMap.size());
            if (dataMap.size() >= 10) { // commit every 10 records; adjust the threshold as needed
                BulkRequestBuilder bulkRequest = client.prepareBulk();
                bulkRequest.add(client.prepareIndex(indexName, "text").setSource(dataMap));
                bulkRequest.execute().actionGet();
                dataMap.clear();
                System.out.println("reset to: " + dataMap.size());
            }
            // Map<String, Object> map = new HashMap<String, Object>();
            // for (String key : head.keySet()) {
            //     map.put("topic", key);
            //     map.put("timestamp", head.get(key));
            //     map.put("data", new String(event.getBody(), "UTF-8"));
            // }
            // IndexRequestBuilder create = client.prepareIndex(indexName, "text").setSource(map);
            // IndexResponse response = create.execute().actionGet();
            txn.commit();
            status = Status.READY;
        } catch (Throwable t) {
            txn.rollback();
            status = Status.BACKOFF;
            t.printStackTrace();
            if (t instanceof Error) {
                throw (Error) t;
            }
        } finally {
            txn.close();
        }
        return status;
    }
}
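
Once process() has flushed a batch, the documents can be checked from any client on the same cluster. The class below is a minimal verification sketch and not part of the sink itself; the cluster name, host, port, and the flume-log index name are assumptions and should match whatever is set in the Flume configuration:

package com.jachs.sink.elasticsearch;

import java.net.InetAddress;

import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.client.transport.TransportClient;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.TransportAddress;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.transport.client.PreBuiltTransportClient;

// Hypothetical helper, not part of the original post: lists documents written by the sink.
public class SinkVerify {
    public static void main(String[] args) throws Exception {
        // cluster name, host, and index name below are assumptions; use the values from your Flume config
        Settings settings = Settings.builder().put("cluster.name", "elasticsearch").build();
        TransportClient client = new PreBuiltTransportClient(settings)
                .addTransportAddress(new TransportAddress(InetAddress.getByName("127.0.0.1"), 9300));
        SearchResponse resp = client.prepareSearch("flume-log")
                .setQuery(QueryBuilders.matchAllQuery())
                .setSize(10)
                .get();
        System.out.println("indexed docs: " + resp.getHits().getTotalHits());
        resp.getHits().forEach(hit -> System.out.println(hit.getSourceAsString()));
        client.close();
    }
}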

Kafka producer code that simulates log writes

package com.test.Kafka;

import java.util.Properties;

import org.apache.commons.lang.RandomStringUtils;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.Producer;
import org.apache.kafka.clients.producer.ProducerRecord;

public class App {
    public static void main(String[] args) {
        Properties properties = new Properties();
        // properties.put("bootstrap.servers",
        // "192.168.2.200:9092,192.168.2.157:9092,192.168.2.233:9092,192.168.2.194:9092,192.168.2.122:9092");
        // properties.put("bootstrap.servers",
        // "192.168.2.200:9092,192.168.2.233:9092,192.168.2.122:9092");
        properties.put("bootstrap.servers", "127.0.0.1:9092");
        properties.put("acks", "all");
        // the numeric values below are the usual example settings; tune them for your setup
        properties.put("retries", 0);
        properties.put("batch.size", 16384);
        properties.put("linger.ms", 1);
        properties.put("buffer.memory", 33554432);
        properties.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        properties.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");

        Producer<String, String> producer = null;
        RandomStringUtils randomStringUtils = new RandomStringUtils();
        try {
            producer = new KafkaProducer<String, String>(properties);
            // the loop count and the random suffix length are assumptions; the topic name doesn't matter
            for (int i = 0; i < 100; i++) {
                producer.send(new ProducerRecord<String, String>("test1",
                        "tokenk" + randomStringUtils.random(1)));
            }
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            producer.close();
        }
    }
}

Modify the Flume configuration

a1.sinks.elasticsearch.type=com.jachs.sink.elasticsearch.ElasticSearchForLogSink
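
The line above only sets the sink type; a complete agent definition also needs a source, a channel, and the sink's own properties (the sink reads hostNames, indexName, and clusterName, matching the ElasticSearchSinkConstants keys used in configure()). The sketch below is an assumed layout for a Kafka-to-Elasticsearch pipeline on Flume 1.6; agent and component names, addresses, topic, and index name all need to be adapted to your environment:

# sketch of a full agent config; all names and addresses here are assumptions
a1.sources = kafkaSrc
a1.channels = memCh
a1.sinks = elasticsearch

# Flume 1.6 Kafka source (ZooKeeper-based); the topic matches the producer above
a1.sources.kafkaSrc.type = org.apache.flume.source.kafka.KafkaSource
a1.sources.kafkaSrc.zookeeperConnect = 127.0.0.1:2181
a1.sources.kafkaSrc.topic = test1
a1.sources.kafkaSrc.channels = memCh

a1.channels.memCh.type = memory
a1.channels.memCh.capacity = 10000
a1.channels.memCh.transactionCapacity = 1000

# keys read by ElasticSearchForLogSink.configure(): hostNames, indexName, clusterName
a1.sinks.elasticsearch.type = com.jachs.sink.elasticsearch.ElasticSearchForLogSink
a1.sinks.elasticsearch.hostNames = 127.0.0.1:9300
a1.sinks.elasticsearch.indexName = flume-log
a1.sinks.elasticsearch.clusterName = elasticsearch
a1.sinks.elasticsearch.channel = memCh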
