Connecting to Kafka with the Java client, using KafkaProducer and KafkaConsumer.

Note: since version 0.10, connecting to Kafka only requires the broker addresses (bootstrap.servers); ZooKeeper connection info is no longer needed.

1. Kafka configuration

{
  "producer": {
    "bootstrap.servers": "10.183.93.127:9093,10.183.93.128:9093,10.183.93.130:9093",
    "key.serializer": "org.apache.kafka.common.serialization.StringSerializer",
    "value.serializer": "org.apache.kafka.common.serialization.StringSerializer",
    "max.request.size": "",
    "batch.size": "",
    "buffer.memory": "",
    "max.block.ms": "",
    "retries": "",
    "acks": ""
  },
  "consumer": {
    "bootstrap.servers": "10.183.93.127:9093,10.183.93.128:9093,10.183.93.130:9093",
    "group.id": "test222",
    "session.timeout.ms": "",
    "key.deserializer": "org.apache.kafka.common.serialization.StringDeserializer",
    "value.deserializer": "org.apache.kafka.common.serialization.StringDeserializer"
  }
}
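The blank values above are left for you to fill in; an empty string will not parse for the numeric settings, so either remove those keys or set real values before using the file. A minimal sketch of overriding them in code after loading — the numbers shown are the client defaults, used here only for illustration, not values from the original config:

    Properties props = KafkaUtils.getProperties("producer");
    // Illustrative values (the 0.10-era client defaults); tune for your workload.
    props.put("acks", "1");
    props.put("retries", "0");
    props.put("batch.size", "16384");         // bytes batched per partition
    props.put("buffer.memory", "33554432");   // 32 MB total send buffer
    props.put("max.block.ms", "60000");       // how long send() may block when the buffer is full
    props.put("max.request.size", "1048576"); // max bytes in a single request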

2. KafkaUtils, a helper for reading the Kafka configuration

package com.wenbronk.kafka;

import com.alibaba.fastjson.JSON;
import com.google.gson.JsonElement;
import com.google.gson.JsonObject;
import com.google.gson.JsonParser;
import org.junit.Test;

import java.io.FileNotFoundException;
import java.io.FileReader;
import java.util.Map;
import java.util.Properties;

public class KafkaUtils {

    @Test
    public void test() throws FileNotFoundException {
        getConfig("producer");
        // fastJSON();
    }

    // Reads one section ("producer" or "consumer") of the JSON config file.
    public static JsonObject getConfig(String name) throws FileNotFoundException {
        JsonParser parser = new JsonParser();
        JsonElement parse = parser.parse(new FileReader("src/main/resources/kafka"));
        JsonObject jsonObject = parse.getAsJsonObject().getAsJsonObject(name);
        System.out.println(jsonObject);
        return jsonObject;
    }

    // Turns a config section into java.util.Properties for the Kafka clients.
    public static Properties getProperties(String sourceName) throws FileNotFoundException {
        JsonObject config = KafkaUtils.getConfig(sourceName);
        Properties properties = new Properties();
        for (Map.Entry<String, JsonElement> entry : config.entrySet()) {
            properties.put(entry.getKey(), entry.getValue().getAsString());
        }
        return properties;
    }

    // public static void fastJSON() throws FileNotFoundException {
    //     Object o = JSON.toJSON(new FileReader("src/main/resources/kafka"));
    //     System.out.println(o);
    // }
}
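On Gson 2.8.6 and later the instance method parser.parse(...) is deprecated in favor of the static JsonParser.parseReader(...); if you are on a newer Gson, the body of getConfig can be swapped for this equivalent sketch (same file layout assumed):

    JsonObject root = JsonParser.parseReader(new FileReader("src/main/resources/kafka"))
            .getAsJsonObject();
    return root.getAsJsonObject(name);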

3. Kafka producer

package com.wenbronk.kafka;

import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.junit.Test;

import java.io.FileNotFoundException;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;

/**
 * Message producer
 */
public class KafkaProducerMain {

    @Test
    public void send() throws Exception {
        HashMap<String, String> map = new HashMap<>();
        map.put("http_zhixin", "send message to kafka from producer");
        // The loop bound was blank in the original; 10 is an illustrative value.
        for (int i = 0; i < 10; i++) {
            sendMessage(map);
        }
    }

    /**
     * Sends each (topic, message) entry of the map.
     */
    public void sendMessage(Map<String, String> topicMsg) throws FileNotFoundException {
        Properties properties = KafkaUtils.getProperties("producer");
        KafkaProducer<String, String> producer = new KafkaProducer<>(properties);

        for (Map.Entry<String, String> entry : topicMsg.entrySet()) {
            String topic = entry.getKey();
            String message = entry.getValue();
            ProducerRecord<String, String> record = new ProducerRecord<>(topic, message);
            // Asynchronous send with a lambda callback; an external callback class also works:
            // producer.send(record, new CallBackFuntion(topic, message));
            producer.send(record, (recordMetadata, e) -> {
                if (e != null) {
                    System.err.println(topic + ": " + message + " -- send failed");
                } else {
                    System.err.println(topic + ": " + message + " -- send succeeded");
                }
            });
        }
        producer.flush();
        producer.close();
    }
}
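send() is asynchronous; the callback above fires once the broker responds. If you need to block until a record is acknowledged, the Future returned by send() can be used instead — a minimal sketch (sendSync is a hypothetical helper, not part of the original class):

    import org.apache.kafka.clients.producer.KafkaProducer;
    import org.apache.kafka.clients.producer.ProducerRecord;
    import org.apache.kafka.clients.producer.RecordMetadata;
    import java.util.concurrent.ExecutionException;

    public static void sendSync(KafkaProducer<String, String> producer, String topic, String message)
            throws InterruptedException, ExecutionException {
        ProducerRecord<String, String> record = new ProducerRecord<>(topic, message);
        // send() returns a Future<RecordMetadata>; get() blocks until the broker acks or the send fails
        RecordMetadata meta = producer.send(record).get();
        System.out.println("acked: partition=" + meta.partition() + ", offset=" + meta.offset());
    }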

The callback can be written as an anonymous inner class (or a lambda, as above), or as a standalone class instantiated for each send:

package com.wenbronk.kafka;

import org.apache.kafka.clients.producer.Callback;
import org.apache.kafka.clients.producer.RecordMetadata;

/**
 * Producer callback
 */
public class CallBackFuntion implements Callback {

    private String topic;
    private String message;

    public CallBackFuntion(String topic, String message) {
        this.topic = topic;
        this.message = message;
    }

    @Override
    public void onCompletion(RecordMetadata recordMetadata, Exception e) {
        if (e != null) {
            System.out.println(topic + ": " + message + " -- send failed");
        } else {
            System.out.println(topic + ": " + message + " -- send succeeded");
        }
    }
}

4. Kafka consumer

package com.wenbronk.kafka;

import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.clients.consumer.OffsetAndMetadata;
import org.apache.kafka.common.TopicPartition;
import org.junit.Test;

import java.io.FileNotFoundException;
import java.util.*;

public class KafkaConsumerMain {

    /**
     * Auto-commit offsets
     */
    public void commitAuto(List<String> topics) throws FileNotFoundException {
        Properties props = KafkaUtils.getProperties("consumer");
        props.put("enable.auto.commit", "true");
        // The commit interval was blank in the original; 1000 ms is an illustrative value.
        props.put("auto.commit.interval.ms", "1000");

        KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props);
        consumer.subscribe(topics);
        while (true) {
            ConsumerRecords<String, String> records = consumer.poll(100);
            for (ConsumerRecord<String, String> record : records) {
                System.err.printf("offset = %d, key = %s, value = %s%n",
                        record.offset(), record.key(), record.value());
            }
        }
    }

    /**
     * Manually commit offsets
     */
    public void commitControl(List<String> topics) throws FileNotFoundException {
        Properties props = KafkaUtils.getProperties("consumer");
        props.put("enable.auto.commit", "false");

        KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props);
        consumer.subscribe(topics);
        // The batch size was blank in the original; 200 is an illustrative value.
        final int minBatchSize = 200;
        List<ConsumerRecord<String, String>> buffer = new ArrayList<>();
        while (true) {
            ConsumerRecords<String, String> records = consumer.poll(100);
            for (ConsumerRecord<String, String> record : records) {
                buffer.add(record);
            }
            if (buffer.size() >= minBatchSize) {
                insertIntoDb(buffer);
                // Blocking synchronous commit
                consumer.commitSync();
                buffer.clear();
            }
        }
    }

    /**
     * Commit offsets per partition
     */
    public void setOffSet(List<String> topics) throws FileNotFoundException {
        Properties props = KafkaUtils.getProperties("consumer");
        props.put("enable.auto.commit", "false");

        KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props);
        consumer.subscribe(topics);
        while (true) {
            ConsumerRecords<String, String> records = consumer.poll(Long.MAX_VALUE);
            // After processing each partition's messages, commit that partition's offset
            for (TopicPartition partition : records.partitions()) {
                List<ConsumerRecord<String, String>> partitionRecords = records.records(partition);
                for (ConsumerRecord<String, String> record : partitionRecords) {
                    System.out.println(record.offset() + ": " + record.value());
                }
                long lastOffset = partitionRecords.get(partitionRecords.size() - 1).offset();
                // The committed offset is the position of the next message to read, hence +1
                consumer.commitSync(Collections.singletonMap(partition, new OffsetAndMetadata(lastOffset + 1)));
            }
        }
    }

    /**
     * Manually set the read position with seek()
     */
    public void setSeek(List<String> topics) throws FileNotFoundException {
        Properties props = KafkaUtils.getProperties("consumer");
        props.put("enable.auto.commit", "false");

        KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props);
        consumer.subscribe(topics);
        // seek() only works on assigned partitions; with subscribe(), poll once so the
        // group assignment happens first. The partition and offset were blank in the
        // original; 0 is an illustrative value for both.
        consumer.poll(0);
        consumer.seek(new TopicPartition("http_zhixin", 0), 0);
        ConsumerRecords<String, String> records = consumer.poll(100);
        for (ConsumerRecord<String, String> record : records) {
            System.err.printf("offset = %d, key = %s, value = %s%n",
                    record.offset(), record.key(), record.value());
            consumer.commitSync();
        }
    }

    @Test
    public void test() throws FileNotFoundException {
        ArrayList<String> topics = new ArrayList<>();
        topics.add("http_zhixin");
        // commitAuto(topics);
        // commitControl(topics);
        // setOffSet(topics);
        setSeek(topics);
    }

    /**
     * Stand-in for real processing, e.g. inserting into a database
     */
    private void insertIntoDb(List<ConsumerRecord<String, String>> buffer) {
        buffer.stream().map(ConsumerRecord::value).forEach(System.err::println);
    }
}
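The poll loops above never exit. In practice the consumer is usually shut down from another thread by calling consumer.wakeup(), which makes the blocked poll() throw a WakeupException — a minimal sketch of that pattern (runUntilWakeup is a hypothetical helper, not from the original):

    import org.apache.kafka.clients.consumer.ConsumerRecords;
    import org.apache.kafka.clients.consumer.KafkaConsumer;
    import org.apache.kafka.common.errors.WakeupException;

    public static void runUntilWakeup(KafkaConsumer<String, String> consumer) {
        try {
            while (true) {
                ConsumerRecords<String, String> records = consumer.poll(100);
                records.forEach(r -> System.out.println(r.offset() + ": " + r.value()));
            }
        } catch (WakeupException e) {
            // thrown when another thread calls consumer.wakeup(); fall through to close
        } finally {
            consumer.close();
        }
    }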

Kafka consumers in the same consumer group do not re-read each other's messages: each message is delivered to only one consumer in the group. Version 0.11 added transaction support.
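A minimal sketch of that 0.11 transactional producer API — the transactional.id is an illustrative value, and the producer config from section 1 would also need acks=all for transactions to be accepted:

    Properties props = KafkaUtils.getProperties("producer");
    props.put("transactional.id", "demo-tx-1"); // illustrative id; required to enable transactions
    KafkaProducer<String, String> producer = new KafkaProducer<>(props);

    producer.initTransactions();
    try {
        producer.beginTransaction();
        producer.send(new ProducerRecord<>("http_zhixin", "message inside a transaction"));
        // visible to read_committed consumers only after the commit
        producer.commitTransaction();
    } catch (Exception e) {
        producer.abortTransaction();
    } finally {
        producer.close();
    }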
