【Kafka】实时看板案例
项目需求
快速计算双十一当天的订单量和销售金额
项目模型
实现步骤
一、创建topic
bin/kafka-topics.sh --create --topic itcast_order --zookeeper node01:2181,node02:2181,node03:2181 --partitions 5 --replication-factor 2
二、创建maven项目并导入要依赖的jar包
<dependencies>
<dependency>
<groupId>org.apache.kafka</groupId>
<artifactId>kafka-clients</artifactId>
<version>0.10.0.0</version>
</dependency>
<dependency>
<groupId>com.alibaba</groupId>
<artifactId>fastjson</artifactId>
<version>1.2.41</version>
</dependency>
<dependency>
<groupId>redis.clients</groupId>
<artifactId>jedis</artifactId>
<version>2.9.0</version>
</dependency>
<dependency>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
<version>1.2.17</version>
</dependency>
</dependencies>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<configuration>
<source>1.8</source>
<target>1.8</target>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-shade-plugin</artifactId>
<version>2.4.1</version>
<executions>
<execution>
<phase>package</phase>
<goals>
<goal>shade</goal>
</goals>
<configuration>
<transformers>
<transformer implementation="org.apache.maven.plugins.shade.resource.ManifestResourceTransformer">
<mainClass>cn.itcast.realboard.LogOperate</mainClass>
</transformer>
</transformers>
</configuration>
</execution>
</executions>
</plugin>
<plugin>
<artifactId>maven-assembly-plugin</artifactId>
<configuration>
<descriptorRefs>
<descriptorRef>jar-with-dependencies</descriptorRef>
</descriptorRefs>
<archive>
<manifest>
<mainClass>cn.itcast.realboard.LogOperate</mainClass>
</manifest>
</archive>
</configuration>
<executions>
<execution>
<id>make-assembly</id>
<phase>package</phase>
<goals>
<goal>single</goal>
</goals>
</execution>
</executions>
</plugin>
</plugins>
</build>
三、消息生产代码实现
- 1.创建订单实体类
package cn.itcast.realboard;

import com.alibaba.fastjson.JSONObject;

import java.io.Serializable;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Random;
import java.util.UUID;

/**
 * Order payment entity. Instances are serialized to JSON (via fastjson) and
 * written to the order log, from where Flume ships them to Kafka.
 *
 * <p>Implements {@link Serializable}: the original declared a
 * {@code serialVersionUID} but never implemented the interface, so the id
 * was dead weight.
 */
public class PaymentInfo implements Serializable {

    private static final long serialVersionUID = -7958315778386204397L;

    // One shared RNG: the original created a new Random() for every field,
    // which seeds from the clock and yields correlated values on fast calls.
    private static final Random RANDOM = new Random();

    private String orderId;         // order id
    private Date createOrderTime;   // order creation time
    private String paymentId;       // payment id
    private Date paymentTime;       // payment time
    private String productId;       // product id
    private String productName;     // product name
    private long productPrice;      // product price
    private long promotionPrice;    // promotion price
    private String shopId;          // shop id
    private String shopName;        // shop name
    private String shopMobile;      // shop phone number
    private long payPrice;          // price actually paid for the order
    private int num;                // order quantity
    /**
     * <Province>19</Province>
     * <City>1657</City>
     * <County>4076</County>
     */
    private String province;        // province code
    private String city;            // city code
    private String county;          // county code
    // e.g. "102,144,114" — comma-separated category ids.
    // NOTE(review): name kept misspelled ("catagorys") on purpose — it is part
    // of the JSON field schema produced by fastjson.
    private String catagorys;

    public String getProvince() {
        return province;
    }

    public void setProvince(String province) {
        this.province = province;
    }

    public String getCity() {
        return city;
    }

    public void setCity(String city) {
        this.city = city;
    }

    public String getCounty() {
        return county;
    }

    public void setCounty(String county) {
        this.county = county;
    }

    public String getCatagorys() {
        return catagorys;
    }

    public void setCatagorys(String catagorys) {
        this.catagorys = catagorys;
    }

    public PaymentInfo() {
    }

    public PaymentInfo(String orderId, Date createOrderTime, String paymentId, Date paymentTime,
                       String productId, String productName, long productPrice, long promotionPrice,
                       String shopId, String shopName, String shopMobile, long payPrice, int num) {
        this.orderId = orderId;
        this.createOrderTime = createOrderTime;
        this.paymentId = paymentId;
        this.paymentTime = paymentTime;
        this.productId = productId;
        this.productName = productName;
        this.productPrice = productPrice;
        this.promotionPrice = promotionPrice;
        this.shopId = shopId;
        this.shopName = shopName;
        this.shopMobile = shopMobile;
        this.payPrice = payPrice;
        this.num = num;
    }

    public String getOrderId() {
        return orderId;
    }

    public void setOrderId(String orderId) {
        this.orderId = orderId;
    }

    public Date getCreateOrderTime() {
        return createOrderTime;
    }

    public void setCreateOrderTime(Date createOrderTime) {
        this.createOrderTime = createOrderTime;
    }

    public String getPaymentId() {
        return paymentId;
    }

    public void setPaymentId(String paymentId) {
        this.paymentId = paymentId;
    }

    public Date getPaymentTime() {
        return paymentTime;
    }

    public void setPaymentTime(Date paymentTime) {
        this.paymentTime = paymentTime;
    }

    public String getProductId() {
        return productId;
    }

    public void setProductId(String productId) {
        this.productId = productId;
    }

    public String getProductName() {
        return productName;
    }

    public void setProductName(String productName) {
        this.productName = productName;
    }

    public long getProductPrice() {
        return productPrice;
    }

    public void setProductPrice(long productPrice) {
        this.productPrice = productPrice;
    }

    public long getPromotionPrice() {
        return promotionPrice;
    }

    public void setPromotionPrice(long promotionPrice) {
        this.promotionPrice = promotionPrice;
    }

    public String getShopId() {
        return shopId;
    }

    public void setShopId(String shopId) {
        this.shopId = shopId;
    }

    public String getShopName() {
        return shopName;
    }

    public void setShopName(String shopName) {
        this.shopName = shopName;
    }

    public String getShopMobile() {
        return shopMobile;
    }

    public void setShopMobile(String shopMobile) {
        this.shopMobile = shopMobile;
    }

    public long getPayPrice() {
        return payPrice;
    }

    public void setPayPrice(long payPrice) {
        this.payPrice = payPrice;
    }

    public int getNum() {
        return num;
    }

    public void setNum(int num) {
        this.num = num;
    }

    @Override
    public String toString() {
        return "PaymentInfo{" +
                "orderId='" + orderId + '\'' +
                ", createOrderTime=" + createOrderTime +
                ", paymentId='" + paymentId + '\'' +
                ", paymentTime=" + paymentTime +
                ", productId='" + productId + '\'' +
                ", productName='" + productName + '\'' +
                ", productPrice=" + productPrice +
                ", promotionPrice=" + promotionPrice +
                ", shopId='" + shopId + '\'' +
                ", shopName='" + shopName + '\'' +
                ", shopMobile='" + shopMobile + '\'' +
                ", payPrice=" + payPrice +
                ", num=" + num +
                '}';
    }

    /**
     * Fills this instance with random demo data and returns it as a JSON string.
     * The order time is fixed to 2015-11-11 so every event falls on "Double 11".
     * productId, productName, num and the payment fields are intentionally left
     * at their current values, matching the original behavior.
     *
     * @return JSON representation of this object
     * @throws ParseException kept for API compatibility; the parse of the fixed
     *         date string is actually handled internally
     */
    public String random() throws ParseException {
        this.orderId = UUID.randomUUID().toString().replaceAll("-", "");
        this.paymentId = UUID.randomUUID().toString().replaceAll("-", "");
        this.productPrice = RANDOM.nextInt(1000);
        this.promotionPrice = RANDOM.nextInt(500);
        this.payPrice = RANDOM.nextInt(480);
        this.shopId = RANDOM.nextInt(200000) + "";
        this.catagorys = RANDOM.nextInt(10000) + "," + RANDOM.nextInt(10000) + "," + RANDOM.nextInt(10000);
        this.province = RANDOM.nextInt(23) + "";
        this.city = RANDOM.nextInt(265) + "";
        this.county = RANDOM.nextInt(1489) + "";
        String date = "2015-11-11 12:22:12";
        // SimpleDateFormat is not thread-safe, so a fresh instance per call.
        SimpleDateFormat simpleDateFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
        try {
            this.createOrderTime = simpleDateFormat.parse(date);
        } catch (ParseException e) {
            e.printStackTrace();
        }
        // toJSONString is static; call it via the class instead of allocating
        // a throwaway JSONObject as the original did.
        return JSONObject.toJSONString(this);
    }
}
- 2.定义log4j.properties配置文件
在/src/main/resources/
目录下创建log4j.properties
文件### 设置###
log4j.rootLogger = debug,stdout,D,E ### 输出信息到控制台 ###
log4j.appender.stdout = org.apache.log4j.ConsoleAppender
log4j.appender.stdout.Target = System.out
log4j.appender.stdout.layout = org.apache.log4j.PatternLayout
log4j.appender.stdout.layout.ConversionPattern = [%-5p] %d{yyyy-MM-dd HH:mm:ss,SSS} method:%l%n%m%n ### 输出DEBUG 级别以上的日志到=E://logs/error.log ###
log4j.appender.D = org.apache.log4j.DailyRollingFileAppender
#log4j.appender.D.File = /Users/zhaozhuang/Desktop/logs/log.log
log4j.appender.D.File = /export/servers/orderLogs/orderinfo.log
log4j.appender.D.Append = true
log4j.appender.D.Threshold = DEBUG
log4j.appender.D.layout = org.apache.log4j.PatternLayout
#log4j.appender.D.layout.ConversionPattern = %-d{yyyy-MM-dd HH:mm:ss} [ %t:%r ] - [ %p ] %m%n
log4j.appender.D.layout.ConversionPattern = %m%n ### 输出ERROR 级别以上的日志到=E://logs/error.log ###
log4j.appender.E = org.apache.log4j.DailyRollingFileAppender
#log4j.appender.E.File = /Users/zhaozhuang/Desktop/logs/error.log
log4j.appender.E.File = /export/servers/orderLogs/ordererror.log
log4j.appender.E.Append = true
log4j.appender.E.Threshold = ERROR
log4j.appender.E.layout = org.apache.log4j.PatternLayout
#log4j.appender.E.layout.ConversionPattern = %-d{yyyy-MM-dd HH:mm:ss} [ %t:%r ] - [ %p ] %m%n
log4j.appender.E.layout.ConversionPattern = %m%n
3.开发日志生产代码
package cn.itcast.realboard;

import org.apache.log4j.Logger;

import java.text.ParseException;

/**
 * Demo log producer: emits one random order record (JSON) per second, both to
 * stdout and to the log4j logger "printLogger", whose file appender is the
 * file Flume tails into Kafka.
 */
public class LogOperate {

    private static Logger printLogger = Logger.getLogger("printLogger");

    public static void main(String[] args) throws ParseException, InterruptedException {
        PaymentInfo paymentInfo = new PaymentInfo();
        // Produce one record per second until the process is killed.
        for (;;) {
            String payload = paymentInfo.random();
            System.out.println(payload);
            printLogger.info(payload);
            Thread.sleep(1000);
        }
    }
}
4.将程序打包并上传服务器运行
上传后执行以下命令
java -jar day10_Project-1.0-SNAPSHOT-jar-with-dependencies.jar
5.开发flume配置文件,实现收集数据到kafka
开发flume配置文件
cd /export/servers/apache-flume-1.6.0-cdh5.14.0-bin/
vim file_kafka.conf
#为我们的source channel sink起名
a1.sources = r1
a1.channels = c1
a1.sinks = k1
#指定我们的source收集到的数据发送到哪个管道
a1.sources.r1.channels = c1
#指定我们的source数据收集策略
a1.sources.r1.type = TAILDIR
a1.sources.r1.positionFile = /var/log/flume/taildir_position.json
a1.sources.r1.filegroups = f1
a1.sources.r1.filegroups.f1 = /export/servers/orderLogs/orderinfo.log #指定我们的channel为memory,即表示所有的数据都装进memory当中
a1.channels.c1.type = memory
#指定我们的sink为kafka sink,并指定我们的sink从哪个channel当中读取数据
a1.sinks.k1.channel = c1
a1.sinks.k1.type = org.apache.flume.sink.kafka.KafkaSink
a1.sinks.k1.kafka.topic = itcast_order
a1.sinks.k1.kafka.bootstrap.servers = node01:9092,node02:9092,node03:9092
a1.sinks.k1.kafka.flumeBatchSize = 20
a1.sinks.k1.kafka.producer.acks = 1
启动flume
bin/flume-ng agent -c conf/ -f conf/file_kafka.conf -n a1
6.启动kafka,开始消费数据
cd /export/servers/kafka_2.11-1.0.0/
bin/kafka-console-consumer.sh --from-beginning --bootstrap-server node01:9092,node02:9092,node03:9092 --topic itcast_order
四、消息消费代码实现
- 1.定义redis工具类
package cn.itcast.realboard;

import org.junit.Test;
import redis.clients.jedis.Jedis;
import redis.clients.jedis.JedisPool;
import redis.clients.jedis.JedisPoolConfig;

/**
 * Lazily-initialized, process-wide Jedis connection pool.
 */
public class JedisUtils {

    private static JedisPool pool = null;

    /**
     * Returns the shared connection pool, creating it on first use.
     *
     * <p>Synchronized: the original unsynchronized check-then-create could
     * build (and leak) several pools when first called from multiple threads.
     */
    public static synchronized JedisPool getPool() {
        if (pool == null) {
            JedisPoolConfig config = new JedisPoolConfig();
            config.setMaxTotal(20); // max active connections
            config.setMaxIdle(5);   // max idle connections
            // host node01, port 6379, timeout 3000 ms
            pool = new JedisPool(config, "node01", 6379, 3000);
        }
        return pool;
    }

    /**
     * Borrows a connection from the shared pool; the caller must close() it
     * to return it to the pool.
     */
    public static Jedis getConn() {
        return getPool().getResource();
    }

    /** Smoke test: increments a counter; finally-block returns the connection. */
    @Test
    public void getJedisTest() throws Exception {
        Jedis jedis = getConn();
        try {
            jedis.incrBy("mine", 5);
        } finally {
            // Original leaked the connection if incrBy threw.
            jedis.close();
        }
    }
}
- 2.开发Kafka消费代码
package cn.itcast.realboard;

import com.alibaba.fastjson.JSONObject;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.clients.consumer.OffsetAndMetadata;
import org.apache.kafka.common.TopicPartition;
import redis.clients.jedis.Jedis;
import redis.clients.jedis.JedisPool;

import java.util.*;

/**
 * Consumes order JSON from the itcast_order topic and aggregates real-time
 * dashboard counters in Redis.
 *
 * <p>Redis key design (keys are a project convention; ":date" is a literal
 * placeholder segment in this demo):
 * <pre>
 *   platform totals:  itcast:order:total:price:date / :user:date / :num:date
 *   per product:      itcast:order:&lt;productId&gt;:price:date / :user:date / :num:date
 *   per shop:         itcast:order:&lt;shopId&gt;:price:date / :user:date / :num:date
 * </pre>
 */
public class MyKafkaConsumer {

    /**
     * Polls itcast_order forever, updating Redis counters and committing
     * offsets manually per partition after each batch.
     *
     * @param args unused
     */
    public static void main(String[] args) {
        Properties props = new Properties();
        props.put("bootstrap.servers", "node01:9092");
        props.put("group.id", "test");
        props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        // Auto-commit is DISABLED (the original comment claimed the opposite):
        // offsets are committed manually after a batch is written to Redis.
        props.put("enable.auto.commit", "false");
        props.put("auto.commit.interval.ms", "1000"); // ignored while auto-commit is off

        KafkaConsumer<String, String> kafkaConsumer = new KafkaConsumer<String, String>(props);
        kafkaConsumer.subscribe(Arrays.asList("itcast_order"));

        while (true) {
            ConsumerRecords<String, String> consumerRecords = kafkaConsumer.poll(3000);
            for (TopicPartition topicPartition : consumerRecords.partitions()) {
                List<ConsumerRecord<String, String>> records = consumerRecords.records(topicPartition);
                if (records.isEmpty()) {
                    continue;
                }
                // Borrow ONE connection per batch (the original borrowed a pool
                // handle and a connection per record) and always return it.
                Jedis jedis = JedisUtils.getPool().getResource();
                try {
                    for (ConsumerRecord<String, String> record : records) {
                        updateCounters(jedis, record.value());
                    }
                } finally {
                    jedis.close();
                }
                // The committed offset must be the offset of the NEXT record to
                // consume, i.e. last processed offset + 1. The original committed
                // the last offset itself, re-processing one record per restart.
                long nextOffset = records.get(records.size() - 1).offset() + 1;
                kafkaConsumer.commitSync(
                        Collections.singletonMap(topicPartition, new OffsetAndMetadata(nextOffset)));
            }
        }
    }

    /** Applies one order record to the platform / product / shop counters. */
    private static void updateCounters(Jedis jedis, String value) {
        PaymentInfo paymentInfo = JSONObject.parseObject(value, PaymentInfo.class);
        long payPrice = paymentInfo.getPayPrice();
        // Platform-level metrics.
        jedis.incrBy("itcast:order:total:price:date", payPrice); // total sales amount
        jedis.incr("itcast:order:total:user:date");              // total buyers
        jedis.incr("itcast:order:total:num:date");               // items sold (1 per order)
        // Per-product metrics. The original product price key was missing the
        // ':' before "price", inconsistent with every other key and with the
        // documented key design; fixed here.
        jedis.incrBy("itcast:order:" + paymentInfo.getProductId() + ":price:date", payPrice);
        jedis.incr("itcast:order:" + paymentInfo.getProductId() + ":user:date");
        jedis.incr("itcast:order:" + paymentInfo.getProductId() + ":num:date");
        // Per-shop metrics.
        jedis.incrBy("itcast:order:" + paymentInfo.getShopId() + ":price:date", payPrice);
        jedis.incr("itcast:order:" + paymentInfo.getShopId() + ":user:date");
        jedis.incr("itcast:order:" + paymentInfo.getShopId() + ":num:date");
    }
}
【Kafka】实时看板案例的更多相关文章
- 第2节 storm实时看板案例:9、实时看板综合案例
=================================== 10.实时看板案例 10.1 项目需求梳理 根据订单mq,快速计算双11当天的订单量.销售金额.
- 第2节 storm实时看板案例:12、实时看板综合案例代码完善;13、今日课程总结
详见代码 将任务提交到集群上面去运行 apache-storm-1.1.1/bin/storm jar cn.itcast.storm.kafkaAndStorm.KafkTopology kafka ...
- 大数据Spark+Kafka实时数据分析案例
本案例利用Spark+Kafka实时分析男女生每秒购物人数,利用Spark Streaming实时处理用户购物日志,然后利用websocket将数据实时推送给浏览器,最后浏览器将接收到的数据实时展现, ...
- 第2节 storm实时看板案例:11、实时看板综合案例工程构建,redis的专业术语
redis当中的一些专业术语: redis缓存击穿 redis缓存雪崩 redis的缓存淘汰 =========================================== 详见代码
- 第2节 storm实时看板案例:10、redis的安装使用回顾
2.redis的持久化机制: redis支持两种持久化机制:RDB AOF RDB:多少秒之内,有多少个key放生变化,将redis当中的数据dump到磁盘保存,保存成一个文件,下次再恢复的时候,首 ...
- 59、Spark Streaming与Spark SQL结合使用之top3热门商品实时统计案例
一.top3热门商品实时统计案例 1.概述 Spark Streaming最强大的地方在于,可以与Spark Core.Spark SQL整合使用,之前已经通过transform.foreachRDD ...
- Kafka吞吐量测试案例
Kafka吞吐量测试案例 作者:尹正杰 版权声明:原创作品,谢绝转载!否则将追究法律责任. 领英公司参考连接:https://www.slideshare.net/JiangjieQin/produc ...
- Spark+Kafka实时监控Oracle数据预警
目标: 监控Oracle某张记录表,有新增数据则获取表数据,并推送到微信企业. 流程: Kafka实时监控Oracle指定表,获取该表操作信息(日志),使用Spark Structured Strea ...
- Spark集成Kafka实时流计算Java案例
package com.test; import java.util.*; import org.apache.spark.SparkConf; import org.apache.spark.Tas ...
随机推荐
- 选择IT行业的自我心得,希望能帮助到各位!(二)
在前面说道的一,希望大家能喜欢,这也只是自己的一种经历,每个人都有年轻的时候,谁的年级都有自以为是,谁的年轻都有风华正茂,谁的年轻都让自己的内涵给我们自己摔了一个狠狠的道理,人外有人天外有天.我记得当 ...
- 同事上班时间无聊,用python敲出贪吃蛇游戏打发时间
自从学会啦python,再也不用担心上班时间老板发现我打游戏啦 贪吃蛇代码: 还有不懂的(https://www.ixigua.com/i6808019824560570888/)这里有视频教程. 如 ...
- pickle\json,configparser,hashlib模块
python常用模块 目录 python常用模块 json模块\pickle模块 configparser模块 hashlib模块 subprocess模块 json模块\pickle模块 首先说一下 ...
- P1464 Function
Function 简 单 的 递 归 这道题一开始十分智障地用递归做,虽然知道没那么简单,但还是冒着送死的心态交了一遍,果然,如我所料 样例输入: 密密麻麻,几万行的样例输入 //:) ...
- 2. js的异步
1. 回掉2. promise3. Generator4. Async/await
- Os-Hax: 1 靶机记录
靶机地址:172.16.1.197 Kali地址:172.16.1.108 1 信息搜集 靶机首页 相关信息查看 端口扫描: 开放22和80 目录扫描: 访问http://172.16.1.197/c ...
- CTFHub web技能树之RCE初步 命令注入+过滤cat
在一个大佬的突然丢了个题过来,于是去玩了玩rce的两道题 大佬的博客跳转链接在此->>>大佬召唤机 叫 命令注入 一上来就是源码出现,上面有个ping的地方 <?php $re ...
- DataTable 与XML 交互
一.将DataTable的内容写入到XML文件中 /// <summary> /// 将DataTable的内容写入到XML文件中 /// </summary> /// < ...
- GIT分布式版本控制
1.1Git简介 linus 用C语言编写 2005年诞生 分布式版本管理系统 速度快,适合大规模,跨地区多人协同开发 Git不仅是一款开源的分布式版本控制系统,而且有其独特的功能特性,例如大多数的分 ...
- 数据挖掘入门系列教程(十一点五)之CNN网络介绍
在前面的两篇博客中,我们介绍了DNN(深度神经网络)并使用keras实现了一个简单的DNN.在这篇博客中将介绍CNN(卷积神经网络),然后在下一篇博客中将使用keras构建一个简单的CNN,对cifa ...