Kafka in Practice: Setting Up a Kafka Development Environment
In the previous article we set up a Kafka server and used Kafka's command-line tools to create a topic and to send and receive messages. Now let's set up a Kafka development environment.

Add the dependency

The development environment needs the Kafka jar on the classpath. Add the following to your Maven pom.xml (the _2.10 suffix means the artifact is built against Scala 2.10; pick the suffix that matches your Scala version if you use a different one):

<dependency>
    <groupId>org.apache.kafka</groupId>
    <artifactId>kafka_2.10</artifactId>
    <version>0.8.0</version>
</dependency>
First, an interface that holds the connection settings shared by all of the demo classes. Replace 10.22.10.139 with the address of the broker and ZooKeeper you set up in the previous article.

package com.sohu.kafkademon;

public interface KafkaProperties
{
    final static String zkConnect = "10.22.10.139:2181";
    final static String groupId = "group1";
    final static String topic = "topic1";
    final static String kafkaServerURL = "10.22.10.139";
    final static int kafkaServerPort = 9092;
    final static int kafkaProducerBufferSize = 64 * 1024;
    final static int connectionTimeOut = 20000;
    final static int reconnectInterval = 10000;
    final static String topic2 = "topic2";
    final static String topic3 = "topic3";
    final static String clientId = "SimpleConsumerDemoClient";
}
The producer runs as a thread and sends a new message to the topic every three seconds:

package com.sohu.kafkademon;

import java.util.Properties;

import kafka.producer.KeyedMessage;
import kafka.producer.ProducerConfig;

/**
 * @author leicui bourne_cui@163.com
 */
public class KafkaProducer extends Thread
{
    private final kafka.javaapi.producer.Producer<Integer, String> producer;
    private final String topic;
    private final Properties props = new Properties();

    public KafkaProducer(String topic)
    {
        // Encode payloads as strings and bootstrap metadata from the broker.
        props.put("serializer.class", "kafka.serializer.StringEncoder");
        props.put("metadata.broker.list", "10.22.10.139:9092");
        producer = new kafka.javaapi.producer.Producer<Integer, String>(new ProducerConfig(props));
        this.topic = topic;
    }

    @Override
    public void run()
    {
        int messageNo = 1;
        while (true)
        {
            String messageStr = "Message_" + messageNo;
            System.out.println("Send:" + messageStr);
            producer.send(new KeyedMessage<Integer, String>(topic, messageStr));
            messageNo++;
            try {
                sleep(3000);
            } catch (InterruptedException e) {
                e.printStackTrace();
            }
        }
    }
}
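The threaded producer above loops forever, so it never releases its broker connections. If you adapt it to send a bounded batch instead, the 0.8 producer API also provides close(). The following is a minimal standalone sketch, not part of the original demo; the class name BoundedProducerDemo, the ten-message count, and the request.required.acks setting are illustrative choices:

package com.sohu.kafkademon;

import java.util.Properties;

import kafka.javaapi.producer.Producer;
import kafka.producer.KeyedMessage;
import kafka.producer.ProducerConfig;

// Sketch: send a fixed batch of messages and then close the producer,
// instead of looping forever like the threaded demo above.
public class BoundedProducerDemo
{
    public static void main(String[] args)
    {
        Properties props = new Properties();
        props.put("serializer.class", "kafka.serializer.StringEncoder");
        props.put("metadata.broker.list",
                KafkaProperties.kafkaServerURL + ":" + KafkaProperties.kafkaServerPort);
        // Wait for the leader broker to acknowledge each write (0.8 producer setting).
        props.put("request.required.acks", "1");

        Producer<Integer, String> producer =
                new Producer<Integer, String>(new ProducerConfig(props));
        try {
            for (int i = 1; i <= 10; i++) {
                producer.send(new KeyedMessage<Integer, String>(KafkaProperties.topic, "Message_" + i));
            }
        } finally {
            // Release the broker connections when done.
            producer.close();
        }
    }
}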
The consumer uses the high-level (ZooKeeper-based) consumer API: it registers with the consumer group, opens one stream for the topic, and prints every message it receives:

package com.sohu.kafkademon;

import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;

import kafka.consumer.ConsumerConfig;
import kafka.consumer.ConsumerIterator;
import kafka.consumer.KafkaStream;
import kafka.javaapi.consumer.ConsumerConnector;

/**
 * @author leicui bourne_cui@163.com
 */
public class KafkaConsumer extends Thread
{
    private final ConsumerConnector consumer;
    private final String topic;

    public KafkaConsumer(String topic)
    {
        consumer = kafka.consumer.Consumer.createJavaConsumerConnector(createConsumerConfig());
        this.topic = topic;
    }

    private static ConsumerConfig createConsumerConfig()
    {
        Properties props = new Properties();
        props.put("zookeeper.connect", KafkaProperties.zkConnect);
        props.put("group.id", KafkaProperties.groupId);
        props.put("zookeeper.session.timeout.ms", "40000");
        props.put("zookeeper.sync.time.ms", "200");
        props.put("auto.commit.interval.ms", "1000");
        return new ConsumerConfig(props);
    }

    @Override
    public void run()
    {
        // Ask for a single stream for the topic and iterate over it;
        // hasNext() blocks until a new message arrives.
        Map<String, Integer> topicCountMap = new HashMap<String, Integer>();
        topicCountMap.put(topic, new Integer(1));
        Map<String, List<KafkaStream<byte[], byte[]>>> consumerMap = consumer.createMessageStreams(topicCountMap);
        KafkaStream<byte[], byte[]> stream = consumerMap.get(topic).get(0);
        ConsumerIterator<byte[], byte[]> it = stream.iterator();
        while (it.hasNext()) {
            System.out.println("receive:" + new String(it.next().message()));
            try {
                sleep(3000);
            } catch (InterruptedException e) {
                e.printStackTrace();
            }
        }
    }
}
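The high-level consumer above blocks in it.hasNext() indefinitely. When a consumer has to stop cleanly, the ConsumerConnector offers shutdown(), which closes its ZooKeeper and broker connections and ends the blocking iteration. Below is a minimal standalone sketch along those lines, assuming the same KafkaProperties; the class name and the ten-message limit are illustrative:

package com.sohu.kafkademon;

import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;

import kafka.consumer.ConsumerConfig;
import kafka.consumer.ConsumerIterator;
import kafka.consumer.KafkaStream;
import kafka.javaapi.consumer.ConsumerConnector;

// Sketch: read a bounded number of messages, then release the consumer's
// ZooKeeper and broker connections with shutdown().
public class BoundedConsumerDemo
{
    public static void main(String[] args)
    {
        Properties props = new Properties();
        props.put("zookeeper.connect", KafkaProperties.zkConnect);
        props.put("group.id", KafkaProperties.groupId);
        props.put("zookeeper.session.timeout.ms", "40000");
        props.put("zookeeper.sync.time.ms", "200");
        props.put("auto.commit.interval.ms", "1000");

        ConsumerConnector consumer =
                kafka.consumer.Consumer.createJavaConsumerConnector(new ConsumerConfig(props));

        Map<String, Integer> topicCountMap = new HashMap<String, Integer>();
        topicCountMap.put(KafkaProperties.topic, 1);
        Map<String, List<KafkaStream<byte[], byte[]>>> streams =
                consumer.createMessageStreams(topicCountMap);
        ConsumerIterator<byte[], byte[]> it =
                streams.get(KafkaProperties.topic).get(0).iterator();

        // Stop after ten messages; hasNext() blocks until a message arrives.
        for (int i = 0; i < 10 && it.hasNext(); i++) {
            System.out.println("receive:" + new String(it.next().message()));
        }
        consumer.shutdown();
    }
}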
Finally, a small driver that starts one producer thread and one consumer thread against the same topic. Run it and you should see every message the producer sends echoed by the consumer:

package com.sohu.kafkademon;

/**
 * @author leicui bourne_cui@163.com
 */
public class KafkaConsumerProducerDemo
{
    public static void main(String[] args)
    {
        KafkaProducer producerThread = new KafkaProducer(KafkaProperties.topic);
        producerThread.start();

        KafkaConsumer consumerThread = new KafkaConsumer(KafkaProperties.topic);
        consumerThread.start();
    }
}
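KafkaProperties also defines topic2, topic3, and a clientId for a low-level SimpleConsumer demo that is not included in this excerpt. As a rough sketch of what such a demo can look like with the 0.8 low-level API (modeled on Kafka's own SimpleConsumerDemo example; the class name, the 100 KB fetch size, and the hard-coded partition 0 / offset 0 are assumptions here):

package com.sohu.kafkademon;

import java.io.UnsupportedEncodingException;
import java.nio.ByteBuffer;

import kafka.api.FetchRequest;
import kafka.api.FetchRequestBuilder;
import kafka.javaapi.FetchResponse;
import kafka.javaapi.consumer.SimpleConsumer;
import kafka.javaapi.message.ByteBufferMessageSet;
import kafka.message.MessageAndOffset;

// Sketch: fetch messages from partition 0 of topic2, starting at offset 0,
// using the low-level SimpleConsumer API instead of ZooKeeper-based balancing.
public class SimpleConsumerDemo
{
    public static void main(String[] args) throws UnsupportedEncodingException
    {
        SimpleConsumer simpleConsumer = new SimpleConsumer(
                KafkaProperties.kafkaServerURL,
                KafkaProperties.kafkaServerPort,
                KafkaProperties.connectionTimeOut,
                KafkaProperties.kafkaProducerBufferSize,
                KafkaProperties.clientId);

        // Fetch up to ~100 KB from partition 0 of topic2, starting at offset 0.
        FetchRequest req = new FetchRequestBuilder()
                .clientId(KafkaProperties.clientId)
                .addFetch(KafkaProperties.topic2, 0, 0L, 100000)
                .build();
        FetchResponse fetchResponse = simpleConsumer.fetch(req);

        ByteBufferMessageSet messageSet = fetchResponse.messageSet(KafkaProperties.topic2, 0);
        for (MessageAndOffset messageAndOffset : messageSet) {
            // Copy the payload bytes out of the message's ByteBuffer and print them.
            ByteBuffer payload = messageAndOffset.message().payload();
            byte[] bytes = new byte[payload.limit()];
            payload.get(bytes);
            System.out.println(new String(bytes, "UTF-8"));
        }
        simpleConsumer.close();
    }
}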