1. Code

package mr;

import java.io.IOException;

import org.apache.commons.lang.StringUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.ArrayWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

/**
 * Uses ArrayWritable (via the LongArrayWritable subclass below) as the map/reduce value type.
 */
public class TrafficApp4 {

    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        Job job = Job.getInstance(conf, TrafficApp4.class.getSimpleName());
        job.setJarByClass(TrafficApp4.class);

        // args[0] is the input path, args[1] the output path.
        FileInputFormat.setInputPaths(job, args[0]);

        job.setMapperClass(TrafficMapper.class);
        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(LongArrayWritable.class);

        job.setReducerClass(TrafficReducer.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(LongArrayWritable.class);
        FileOutputFormat.setOutputPath(job, new Path(args[1]));

        job.waitForCompletion(true);
    }

    public static class TrafficMapper extends Mapper<LongWritable, Text, Text, LongArrayWritable> {
        @Override
        protected void map(LongWritable key, Text value,
                Mapper<LongWritable, Text, Text, LongArrayWritable>.Context context)
                throws IOException, InterruptedException {
            String line = value.toString();
            String[] splited = line.split("\t");

            // The field positions below assume the tab-separated traffic log used in this
            // example series (phone number in column 1, the four counters in columns 6-9);
            // adjust the indices to match your own input format.
            String phonenumber = splited[1];
            String upPackNum = splited[6];
            String downPackNum = splited[7];
            String upPayLoad = splited[8];
            String downPayLoad = splited[9];

            Text k2 = new Text(phonenumber);
            LongArrayWritable v2 = new LongArrayWritable(upPackNum, downPackNum, upPayLoad, downPayLoad);
            context.write(k2, v2);
        }
    }

    public static class TrafficReducer extends Reducer<Text, LongArrayWritable, Text, LongArrayWritable> {
        @Override
        protected void reduce(Text k2, Iterable<LongArrayWritable> v2s,
                Reducer<Text, LongArrayWritable, Text, LongArrayWritable>.Context context)
                throws IOException, InterruptedException {
            long upPackNum = 0L;
            long downPackNum = 0L;
            long upPayLoad = 0L;
            long downPayLoad = 0L;
            for (LongArrayWritable v2 : v2s) {
                // get() returns Writable[]; cast each element back to LongWritable.
                Writable[] values = v2.get();
                upPackNum += ((LongWritable) values[0]).get();
                downPackNum += ((LongWritable) values[1]).get();
                upPayLoad += ((LongWritable) values[2]).get();
                downPayLoad += ((LongWritable) values[3]).get();
            }
            LongArrayWritable v3 = new LongArrayWritable(upPackNum, downPackNum, upPayLoad, downPayLoad);
            context.write(k2, v3);
        }
    }

    public static class LongArrayWritable extends ArrayWritable {
        /**
         * When this class is used by the framework, this no-arg constructor is called
         * first, then set(Writable[]) fills in the values.
         */
        public LongArrayWritable() {
            super(LongWritable.class);
        }

        /**
         * Call this constructor directly.
         * @param values
         */
        public LongArrayWritable(LongWritable[] values) {
            super(LongWritable.class, values);
        }

        /**
         * Call this constructor directly.
         * @param upPackNum
         * @param downPackNum
         * @param upPayLoad
         * @param downPayLoad
         */
        public LongArrayWritable(Long upPackNum, Long downPackNum, Long upPayLoad, Long downPayLoad) {
            super(LongWritable.class);
            LongWritable[] values = new LongWritable[4];
            values[0] = new LongWritable(upPackNum);
            values[1] = new LongWritable(downPackNum);
            values[2] = new LongWritable(upPayLoad);
            values[3] = new LongWritable(downPayLoad);
            super.set(values);
        }

        /**
         * Call this constructor directly.
         * @param upPackNum
         * @param downPackNum
         * @param upPayLoad
         * @param downPayLoad
         */
        public LongArrayWritable(String upPackNum, String downPackNum, String upPayLoad, String downPayLoad) {
            super(LongWritable.class);
            LongWritable[] values = new LongWritable[4];
            values[0] = new LongWritable(Long.parseLong(upPackNum));
            values[1] = new LongWritable(Long.parseLong(downPackNum));
            values[2] = new LongWritable(Long.parseLong(upPayLoad));
            values[3] = new LongWritable(Long.parseLong(downPayLoad));
            super.set(values);
        }

        @Override
        public String toString() {
            // toStrings() stringifies each element; join them with tabs for text output.
            String[] array = super.toStrings();
            return StringUtils.join(array, "\t");
        }
    }
}
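Before turning to the API reference, a quick standalone check of the custom value type may help. This sketch is not part of the original job; the class name LongArrayWritableCheck and the four counter values are made up for illustration, and it reuses the LongArrayWritable class defined above.

package mr;

import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Writable;

public class LongArrayWritableCheck {
    public static void main(String[] args) {
        // Build one value the way the reducer does (from longs) and one the way
        // the mapper does (from the string fields of an input line).
        TrafficApp4.LongArrayWritable fromLongs =
                new TrafficApp4.LongArrayWritable(3L, 12L, 1527L, 2106L);
        TrafficApp4.LongArrayWritable fromStrings =
                new TrafficApp4.LongArrayWritable("3", "12", "1527", "2106");

        // get() hands the elements back as Writable[]; cast to LongWritable to read them.
        Writable[] values = fromLongs.get();
        System.out.println(((LongWritable) values[0]).get()); // 3

        // toString() joins toStrings() with tabs, so both lines print 3\t12\t1527\t2106.
        System.out.println(fromLongs);
        System.out.println(fromStrings);
    }
}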

2. The ArrayWritable API

org.apache.hadoop.io
Class ArrayWritable

java.lang.Object
  org.apache.hadoop.io.ArrayWritable

All Implemented Interfaces:
Writable

public class ArrayWritable
extends Object
implements Writable

A Writable for arrays containing instances of a class. The elements of this writable must all be instances of the same class. If this writable will be the input for a Reducer, you will need to create a subclass that sets the value to be of the proper type. For example:

public class IntArrayWritable extends ArrayWritable {
    public IntArrayWritable() {
        super(IntWritable.class);
    }
}
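The pattern in that example is exactly what LongArrayWritable in section 1 does for LongWritable. As a rough, self-contained sketch (the class name IntArrayWritableDemo and the numbers are made up for illustration), this is how such a subclass is typically filled with set() and read back with get():

import java.util.Arrays;

import org.apache.hadoop.io.ArrayWritable;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Writable;

public class IntArrayWritableDemo {

    // The subclass from the Javadoc example above.
    public static class IntArrayWritable extends ArrayWritable {
        public IntArrayWritable() {
            super(IntWritable.class);
        }
    }

    public static void main(String[] args) {
        IntArrayWritable a = new IntArrayWritable();
        // set() stores the elements; they must all be instances of the value class.
        a.set(new Writable[] { new IntWritable(1), new IntWritable(2) });
        // get() returns Writable[]; cast each element back to IntWritable.
        int first = ((IntWritable) a.get()[0]).get();
        System.out.println(first);                          // 1
        System.out.println(Arrays.toString(a.toStrings())); // [1, 2]
    }
}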


Constructor Summary

ArrayWritable(Class<? extends Writable> valueClass)
ArrayWritable(Class<? extends Writable> valueClass, Writable[] values)
ArrayWritable(String[] strings)

Method Summary

Writable[] get()
Class getValueClass()
void readFields(DataInput in)
    Deserialize the fields of this object from in.
void set(Writable[] values)
Object toArray()
String[] toStrings()
void write(DataOutput out)
    Serialize the fields of this object to out.

Methods inherited from class java.lang.Object
clone, equals, finalize, getClass, hashCode, notify, notifyAll, toString, wait, wait, wait

Constructor Detail

ArrayWritable

public ArrayWritable(Class<? extends Writable> valueClass)

ArrayWritable

public ArrayWritable(Class<? extends Writable> valueClass,
                     Writable[] values)

ArrayWritable

public ArrayWritable(String[] strings)

Method Detail

getValueClass

public Class getValueClass()

toStrings

public String[] toStrings()

toArray

public Object toArray()

set

public void set(Writable[] values)

get

public Writable[] get()

readFields

public void readFields(DataInput in)
                throws IOException
Description copied from interface: Writable
Deserialize the fields of this object from in.

For efficiency, implementations should attempt to re-use storage in the existing object where possible.

Specified by:
readFields in interface Writable
Parameters:
in - DataInput to deserialize this object from.
Throws:
IOException

write

public void write(DataOutput out)
           throws IOException
Description copied from interface: Writable
Serialize the fields of this object to out.

Specified by:
write in interface Writable
Parameters:
out - DataOutput to serialize this object into.
Throws:
IOException
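To see write(DataOutput) and readFields(DataInput) in action outside of a running job, here is a rough round-trip sketch using Hadoop's in-memory DataOutputBuffer and DataInputBuffer. It reuses the LongArrayWritable class from section 1; the demo class name and the counter values are made up. It also illustrates why the no-arg constructor matters: the reading side starts from an empty instance whose element class is already fixed.

package mr;

import java.io.IOException;

import org.apache.hadoop.io.DataInputBuffer;
import org.apache.hadoop.io.DataOutputBuffer;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Writable;

public class LongArrayWritableRoundTrip {
    public static void main(String[] args) throws IOException {
        TrafficApp4.LongArrayWritable original =
                new TrafficApp4.LongArrayWritable(3L, 12L, 1527L, 2106L);

        // Serialize the fields into an in-memory buffer.
        DataOutputBuffer out = new DataOutputBuffer();
        original.write(out);

        // Deserialize into a fresh instance: the no-arg constructor fixes the element
        // class (LongWritable), then readFields rebuilds the array from the bytes.
        TrafficApp4.LongArrayWritable copy = new TrafficApp4.LongArrayWritable();
        DataInputBuffer in = new DataInputBuffer();
        in.reset(out.getData(), out.getLength());
        copy.readFields(in);

        Writable[] values = copy.get();
        System.out.println(((LongWritable) values[0]).get()); // 3
        System.out.println(copy);                             // 3\t12\t1527\t2106
    }
}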
