/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.runtime.aggregate

import java.lang.Iterable

import org.apache.flink.api.common.functions.{MapPartitionFunction, RichGroupReduceFunction}
import org.apache.flink.configuration.Configuration
import org.apache.flink.table.codegen.{Compiler, GeneratedAggregationsFunction}
import org.apache.flink.table.util.Logging
import org.apache.flink.types.Row
import org.apache.flink.util.Collector

/**
 * [[RichGroupReduceFunction]] and [[MapPartitionFunction]] to compute aggregates that do
 * not support pre-aggregation for batch (DataSet) queries.
 *
 * @param genAggregations Code-generated [[GeneratedAggregations]]
 */
class DataSetAggFunction(
    private val genAggregations: GeneratedAggregationsFunction)
  extends RichGroupReduceFunction[Row, Row]
    with MapPartitionFunction[Row, Row]
    with Compiler[GeneratedAggregations]
    with Logging {

  private var output: Row = _
  private var accumulators: Row = _

  private var function: GeneratedAggregations = _

  override def open(config: Configuration) {
    LOG.debug(s"Compiling AggregateHelper: ${genAggregations.name} \n\n " +
      s"Code:\n${genAggregations.code}")
    val clazz = compile(
      getRuntimeContext.getUserCodeClassLoader,
      genAggregations.name,
      genAggregations.code)
    LOG.debug("Instantiating AggregateHelper.")
    function = clazz.newInstance()

    output = function.createOutputRow()
    accumulators = function.createAccumulators()
  }

  /**
   * Computes a non-pre-aggregated aggregation.
   *
   * @param records An iterator over all records of the group.
   * @param out The collector to hand results to.
   */
  override def reduce(records: Iterable[Row], out: Collector[Row]): Unit = {

    // reset accumulators
    function.resetAccumulator(accumulators)

    val iterator = records.iterator()

    var record: Row = null
    while (iterator.hasNext) {
      record = iterator.next()

      // accumulate
      function.accumulate(accumulators, record)
    }

    // set group keys value to final output
    function.setForwardedFields(record, output)

    // set agg results to output
    function.setAggregationResults(accumulators, output)

    out.collect(output)
  }

  /**
   * Computes a non-pre-aggregated aggregation and returns a row even if the input is empty.
   *
   * @param records An iterator over all records of the partition.
   * @param out The collector to hand results to.
   */
  override def mapPartition(records: Iterable[Row], out: Collector[Row]): Unit = {
    reduce(records, out)
  }
}
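In Flink itself this function is instantiated by the Table API's DataSet translation rules rather than by user code, but a rough picture of how it plugs into the DataSet API may help. The sketch below is illustrative only: the input DataSet, the key position, the result field types and the genFunction value (which the aggregation code generator would normally produce) are all placeholders, and the real planner additionally sets operator names, result types and forwarded-field information.

import org.apache.flink.api.common.typeinfo.{BasicTypeInfo, TypeInformation}
import org.apache.flink.api.java.typeutils.RowTypeInfo
import org.apache.flink.api.scala._
import org.apache.flink.table.codegen.GeneratedAggregationsFunction
import org.apache.flink.table.runtime.aggregate.DataSetAggFunction
import org.apache.flink.types.Row

// Illustrative sketch only: in a real job these operators are created by the
// Table API planner, and genFunction comes from the aggregation code generator.
object DataSetAggFunctionUsageSketch {

  // Placeholders: an input of (key: Long, value: Long) rows and a generated aggregation.
  val input: DataSet[Row] = ???
  val genFunction: GeneratedAggregationsFunction = ???

  // The Scala DataSet API needs a TypeInformation for the result rows (key + aggregate).
  implicit val outType: TypeInformation[Row] =
    new RowTypeInfo(BasicTypeInfo.LONG_TYPE_INFO, BasicTypeInfo.LONG_TYPE_INFO)

  // Grouped aggregation without pre-aggregation: one reduce() call per key group.
  val grouped: DataSet[Row] = input
    .groupBy(0)
    .reduceGroup(new DataSetAggFunction(genFunction))

  // Global aggregation: mapPartition() delegates to reduce() and emits a row even
  // for an empty input; the planner runs this operator with parallelism 1.
  val global: DataSet[Row] = input
    .mapPartition(new DataSetAggFunction(genFunction))
}

The second listing below is the tumbling time-window counterpart: instead of accumulating raw records it merges incoming accumulator rows (typically produced by a preceding pre-aggregation step) and attaches window start/end, and optionally rowtime, properties to each emitted row.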
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.runtime.aggregate

import java.lang.Iterable

import org.apache.flink.api.common.functions.RichGroupReduceFunction
import org.apache.flink.configuration.Configuration
import org.apache.flink.table.codegen.{Compiler, GeneratedAggregationsFunction}
import org.apache.flink.table.util.Logging
import org.apache.flink.types.Row
import org.apache.flink.util.Collector

/**
 * Wraps the aggregate logic inside of
 * [[org.apache.flink.api.java.operators.GroupReduceOperator]]. It is used for tumbling
 * time-windows on batch.
 *
 * @param genAggregations Code-generated [[GeneratedAggregations]]
 * @param windowSize Tumbling time window size
 * @param windowStartPos The relative window-start field position to the last field of output row
 * @param windowEndPos The relative window-end field position to the last field of output row
 * @param windowRowtimePos The relative window-rowtime field position to the last field of
 *                         output row
 * @param keysAndAggregatesArity The total arity of keys and aggregates
 */
class DataSetTumbleTimeWindowAggReduceGroupFunction(
    genAggregations: GeneratedAggregationsFunction,
    windowSize: Long,
    windowStartPos: Option[Int],
    windowEndPos: Option[Int],
    windowRowtimePos: Option[Int],
    keysAndAggregatesArity: Int)
  extends RichGroupReduceFunction[Row, Row]
    with Compiler[GeneratedAggregations]
    with Logging {

  private var collector: DataSetTimeWindowPropertyCollector = _
  protected var aggregateBuffer: Row = new Row(keysAndAggregatesArity + 1)

  private var output: Row = _
  protected var accumulators: Row = _

  protected var function: GeneratedAggregations = _

  override def open(config: Configuration) {
    LOG.debug(s"Compiling AggregateHelper: ${genAggregations.name} \n\n " +
      s"Code:\n${genAggregations.code}")
    val clazz = compile(
      getRuntimeContext.getUserCodeClassLoader,
      genAggregations.name,
      genAggregations.code)
    LOG.debug("Instantiating AggregateHelper.")
    function = clazz.newInstance()

    output = function.createOutputRow()
    accumulators = function.createAccumulators()
    collector = new DataSetTimeWindowPropertyCollector(
      windowStartPos,
      windowEndPos,
      windowRowtimePos)
  }

  override def reduce(records: Iterable[Row], out: Collector[Row]): Unit = {

    var last: Row = null
    val iterator = records.iterator()

    // reset accumulator
    function.resetAccumulator(accumulators)

    while (iterator.hasNext) {
      val record = iterator.next()
      function.mergeAccumulatorsPair(accumulators, record)
      last = record
    }

    // set group keys value to final output.
    function.setForwardedFields(last, output)

    // get final aggregate value and set to output.
    function.setAggregationResults(accumulators, output)

    // get window start timestamp
    val startTs: Long = last.getField(keysAndAggregatesArity).asInstanceOf[Long]

    // set collector and window
    collector.wrappedCollector = out
    collector.windowStart = startTs
    collector.windowEnd = startTs + windowSize

    collector.collect(output)
  }
}
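Both listings drive a GeneratedAggregations instance that only exists at runtime, after the Compiler mix-in has compiled the generated source, so its shape is easy to miss when reading the code. The outline below is a simplified, illustrative reconstruction based purely on the calls made above; the signatures are inferred from how the methods are used and may differ in detail (for example, return types) from the class Flink's code generator actually emits.

import org.apache.flink.types.Row

// Illustrative outline of the methods the two functions above rely on. The real
// GeneratedAggregations subclass is generated and compiled at runtime; its bodies
// encode the specific aggregate functions, key positions and field types of the query.
abstract class GeneratedAggregationsOutline {

  // Creates a Row holding one accumulator per aggregate function.
  def createAccumulators(): Row

  // Creates the reusable output Row that results are written into.
  def createOutputRow(): Row

  // Resets all accumulators to their initial state (called once per group).
  def resetAccumulator(accumulators: Row): Unit

  // Folds a single input record into the accumulators (used by DataSetAggFunction.reduce).
  def accumulate(accumulators: Row, input: Row): Unit

  // Merges an incoming accumulator row into the current accumulators
  // (used by the tumbling-window function for pre-aggregated input).
  def mergeAccumulatorsPair(accumulators: Row, otherAccumulators: Row): Unit

  // Copies the grouping-key fields from a record into the output Row.
  def setForwardedFields(input: Row, output: Row): Unit

  // Writes the final aggregate values from the accumulators into the output Row.
  def setAggregationResults(accumulators: Row, output: Row): Unit
}

Keeping the query-specific logic in generated code lets the two operator functions above stay generic: they only deal with Rows and delegate every aggregate-specific step (reset, accumulate or merge, and writing results) to the compiled class, while the window variant additionally reads the window start timestamp from the last field and lets DataSetTimeWindowPropertyCollector attach the window properties to each emitted row.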
