// Producer side: produces the data

/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kafka.examples;

import java.util.Properties;
import kafka.producer.KeyedMessage;
import kafka.producer.ProducerConfig;

public class Producer extends Thread {
    private final kafka.javaapi.producer.Producer<Integer, String> producer;
    private final String topic;
    private final Properties props = new Properties();

    public Producer(String topic) {
        // Messages are plain strings.
        props.put("serializer.class", "kafka.serializer.StringEncoder");
        props.put("metadata.broker.list",
                "192.168.1.155:9092,192.168.1.156:9092");
        // Use the default random partitioner. The key is never set, so its type
        // is simply declared as Integer; the message value is of type String.
        producer = new kafka.javaapi.producer.Producer<Integer, String>(
                new ProducerConfig(props));
        this.topic = topic;
    }

    public void run() {
        for (int i = 0; i < 2000; i++) {
            String messageStr = "Message_" + i;
            System.out.println("produce: " + messageStr);
            producer.send(new KeyedMessage<Integer, String>(topic, messageStr));
        }
        // Flush any buffered messages and release the broker connections.
        producer.close();
    }

    public static void main(String[] args) {
        Producer producerThread = new Producer(KafkaProperties.topic);
        producerThread.start();
    }
}
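Both classes reference a KafkaProperties helper that the post does not include. A minimal sketch is shown here, assuming constant names that match the usage above and in the consumer below; the values themselves are placeholders for your own topic name, ZooKeeper address, and consumer group id.

package kafka.examples;

// Hypothetical constants holder; replace the values with a real topic name,
// ZooKeeper connection string, and consumer group id.
public class KafkaProperties {
    public static final String topic = "topic1";
    public static final String zkConnect = "192.168.1.155:2181";
    public static final String groupId = "group1";
}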

// Consumer side: consumes the data

/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kafka.examples;

import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import kafka.consumer.ConsumerConfig;
import kafka.consumer.ConsumerIterator;
import kafka.consumer.KafkaStream;
import kafka.javaapi.consumer.ConsumerConnector;

public class Consumer extends Thread {
    private final ConsumerConnector consumer;
    private final String topic;

    public Consumer(String topic) {
        // Create the high-level consumer connector from the configuration built below.
        consumer = kafka.consumer.Consumer
                .createJavaConsumerConnector(createConsumerConfig());
        this.topic = topic;
    }

    // Build the consumer configuration.
    private static ConsumerConfig createConsumerConfig() {
        Properties props = new Properties();
        props.put("zookeeper.connect", KafkaProperties.zkConnect);
        // Consumers that share the same group.id divide the topic's partitions among themselves.
        props.put("group.id", KafkaProperties.groupId);
        props.put("zookeeper.session.timeout.ms", "400");
        props.put("zookeeper.sync.time.ms", "200");
        // Offsets are auto-committed once per minute.
        props.put("auto.commit.interval.ms", "60000");

        return new ConsumerConfig(props);
    }

    // The blocking iterator makes consumption look push-like, but Kafka
    // consumers always pull data from the brokers.
    public void run() {
        Map<String, Integer> topicCountMap = new HashMap<String, Integer>();
        // Request one stream (one consuming thread) for the topic.
        topicCountMap.put(topic, Integer.valueOf(1));
        // Create the message streams for the requested topics.
        Map<String, List<KafkaStream<byte[], byte[]>>> consumerMap = consumer
                .createMessageStreams(topicCountMap);
        // Take the single stream for this topic and iterate over it.
        KafkaStream<byte[], byte[]> stream = consumerMap.get(topic).get(0);
        ConsumerIterator<byte[], byte[]> it = stream.iterator();

        while (it.hasNext()) {
            // Application logic goes here.
            System.out.println(new String(it.next().message()));
        }
    }

    public static void main(String[] args) {
        Consumer consumerThread = new Consumer(KafkaProperties.topic);
        consumerThread.start();
    }
}
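The kafka.javaapi producer and the kafka.consumer high-level consumer used above belong to the old Scala client, which has since been removed from Kafka. The sketch below shows the same produce/consume flow against the current org.apache.kafka.clients API; it is an illustration under assumptions rather than part of the original example: the broker addresses are reused from above, the KafkaProperties constants are the hypothetical ones sketched earlier, String keys and values are assumed, and the Duration overload of poll() needs kafka-clients 2.0 or newer. The new consumer also makes the pull model explicit: every poll() is a fetch issued by the client, not a push from the server.

package kafka.examples;

import java.time.Duration;
import java.util.Collections;
import java.util.Properties;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;

public class NewClientExample {

    // Sends the same 2000 messages with the new producer client.
    static void produce() {
        Properties props = new Properties();
        // Brokers are addressed directly; the addresses are assumptions.
        props.put("bootstrap.servers", "192.168.1.155:9092,192.168.1.156:9092");
        props.put("key.serializer",
                "org.apache.kafka.common.serialization.StringSerializer");
        props.put("value.serializer",
                "org.apache.kafka.common.serialization.StringSerializer");
        KafkaProducer<String, String> producer = new KafkaProducer<String, String>(props);
        try {
            for (int i = 0; i < 2000; i++) {
                producer.send(new ProducerRecord<String, String>(
                        KafkaProperties.topic, "Message_" + i));
            }
        } finally {
            // close() flushes buffered records before releasing the connections.
            producer.close();
        }
    }

    // Reads the messages back; the pull model is explicit in poll().
    static void consume() {
        Properties props = new Properties();
        props.put("bootstrap.servers", "192.168.1.155:9092,192.168.1.156:9092");
        props.put("group.id", KafkaProperties.groupId);
        props.put("key.deserializer",
                "org.apache.kafka.common.serialization.StringDeserializer");
        props.put("value.deserializer",
                "org.apache.kafka.common.serialization.StringDeserializer");
        KafkaConsumer<String, String> consumer = new KafkaConsumer<String, String>(props);
        consumer.subscribe(Collections.singletonList(KafkaProperties.topic));
        try {
            while (true) {
                // Each poll() pulls a batch of records from the brokers.
                ConsumerRecords<String, String> records = consumer.poll(Duration.ofMillis(1000));
                for (ConsumerRecord<String, String> record : records) {
                    System.out.println(record.value());
                }
            }
        } finally {
            consumer.close();
        }
    }
}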
