Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -37,6 +37,7 @@

import org.apache.flink.FlinkVersion;
import org.apache.flink.api.dag.Transformation;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.api.java.tuple.Tuple5;
import org.apache.flink.configuration.ReadableConfig;
import org.apache.flink.streaming.api.operators.OneInputStreamOperator;
Expand Down Expand Up @@ -205,11 +206,15 @@ protected Transformation<RowData> translateToPlanInternal(
// TODO: support more window types.
Tuple5<Long, Long, Long, Integer, Integer> windowSpecParams =
WindowUtils.extractWindowParameters(windowing);
Tuple2<Integer, Integer> windowStartAndEndIndexes =
WindowUtils.getWindowStartAndEndIndexes(namedWindowProperties, (RowType) getOutputType());
long size = windowSpecParams.f0;
long slide = windowSpecParams.f1;
long offset = windowSpecParams.f2;
int rowtimeIndex = windowSpecParams.f3;
int windowType = windowSpecParams.f4;
int windowStartIndex = windowStartAndEndIndexes.f0;
int windowEndIndex = windowStartAndEndIndexes.f1;
PartitionFunctionSpec sliceAssignerSpec =
new StreamWindowPartitionFunctionSpec(
inputType, rowtimeIndex, size, slide, offset, windowType);
Expand Down Expand Up @@ -252,7 +257,10 @@ protected Transformation<RowData> translateToPlanInternal(
offset,
windowType,
outputType,
rowtimeIndex);
false,
rowtimeIndex,
windowStartIndex,
windowEndIndex);
final OneInputStreamOperator windowOperator =
new GlutenVectorOneInputOperator(
new StatefulPlanNode(windowAgg.getId(), windowAgg),
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -220,7 +220,10 @@ protected Transformation<RowData> translateToPlanInternal(
offset,
windowType,
outputType,
rowtimeIndex);
false,
rowtimeIndex,
-1,
-1);
final OneInputStreamOperator localAggOperator =
new GlutenVectorOneInputOperator(
new StatefulPlanNode(windowAgg.getId(), windowAgg),
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -37,6 +37,7 @@

import org.apache.flink.FlinkVersion;
import org.apache.flink.api.dag.Transformation;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.api.java.tuple.Tuple5;
import org.apache.flink.configuration.ReadableConfig;
import org.apache.flink.streaming.api.operators.OneInputStreamOperator;
Expand Down Expand Up @@ -197,11 +198,16 @@ protected Transformation<RowData> translateToPlanInternal(
// TODO: support more window types.
Tuple5<Long, Long, Long, Integer, Integer> windowSpecParams =
WindowUtils.extractWindowParameters(windowing);
Tuple2<Integer, Integer> windowStartAndEndIndexes =
WindowUtils.getWindowStartAndEndIndexes(namedWindowProperties, (RowType) getOutputType());
long size = windowSpecParams.f0;
long slide = windowSpecParams.f1;
long offset = windowSpecParams.f2;
int rowtimeIndex = windowSpecParams.f3;
int windowType = windowSpecParams.f4;
boolean isRowTime = windowing.isRowtime();
int windowStartIndex = windowStartAndEndIndexes.f0;
int windowEndIndex = windowStartAndEndIndexes.f1;
PartitionFunctionSpec sliceAssignerSpec =
new StreamWindowPartitionFunctionSpec(
inputType, rowtimeIndex, size, slide, offset, windowType);
Expand All @@ -210,25 +216,29 @@ protected Transformation<RowData> translateToPlanInternal(
PlanNodeIdGenerator.newId(),
AggregateStep.SINGLE,
groupingKeys,
groupingKeys,
isRowTime ? groupingKeys : List.of(),
aggNames,
aggregates,
false,
List.of(new EmptyNode(inputType)),
null,
List.of());
// processing-time windows cannot use the local-global aggregate optimization, so the
// local aggregate must be set to null when the window is not an event-time window.
PlanNode localAgg =
new AggregationNode(
PlanNodeIdGenerator.newId(),
AggregateStep.SINGLE,
groupingKeys,
groupingKeys,
aggNames,
aggregates,
false,
List.of(new EmptyNode(inputType)),
null,
List.of());
isRowTime
? new AggregationNode(
PlanNodeIdGenerator.newId(),
AggregateStep.SINGLE,
groupingKeys,
groupingKeys,
aggNames,
aggregates,
false,
List.of(new EmptyNode(inputType)),
null,
List.of())
: null;
PlanNode windowAgg =
new StreamWindowAggregationNode(
PlanNodeIdGenerator.newId(),
Expand All @@ -244,7 +254,10 @@ protected Transformation<RowData> translateToPlanInternal(
offset,
windowType,
outputType,
rowtimeIndex);
windowing.isRowtime(),
rowtimeIndex,
windowStartIndex,
windowEndIndex);
final OneInputStreamOperator windowOperator =
new GlutenVectorOneInputOperator(
new StatefulPlanNode(windowAgg.getId(), windowAgg),
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -16,16 +16,24 @@
*/
package org.apache.gluten.rexnode;

import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.api.java.tuple.Tuple5;
import org.apache.flink.table.planner.plan.logical.CumulativeWindowSpec;
import org.apache.flink.table.planner.plan.logical.HoppingWindowSpec;
import org.apache.flink.table.planner.plan.logical.SessionWindowSpec;
import org.apache.flink.table.planner.plan.logical.SliceAttachedWindowingStrategy;
import org.apache.flink.table.planner.plan.logical.TimeAttributeWindowingStrategy;
import org.apache.flink.table.planner.plan.logical.TumblingWindowSpec;
import org.apache.flink.table.planner.plan.logical.WindowAttachedWindowingStrategy;
import org.apache.flink.table.planner.plan.logical.WindowSpec;
import org.apache.flink.table.planner.plan.logical.WindowingStrategy;
import org.apache.flink.table.runtime.groupwindow.NamedWindowProperty;
import org.apache.flink.table.runtime.groupwindow.WindowEnd;
import org.apache.flink.table.runtime.groupwindow.WindowStart;
import org.apache.flink.table.types.logical.RowType;

import java.time.Duration;
import java.util.List;

/** Utility to store some useful functions. */
public class WindowUtils {
Expand Down Expand Up @@ -53,24 +61,27 @@ public static Tuple5<Long, Long, Long, Integer, Integer> extractWindowParameters
if (windowOffset != null) {
offset = windowOffset.toMillis();
}
windowType = 0;
} else if (windowSpec instanceof TumblingWindowSpec) {
size = ((TumblingWindowSpec) windowSpec).getSize().toMillis();
Duration windowOffset = ((TumblingWindowSpec) windowSpec).getOffset();
if (windowOffset != null) {
offset = windowOffset.toMillis();
}
windowType = 1;
} else if (windowSpec instanceof CumulativeWindowSpec) {
windowType = 2;
} else if (windowSpec instanceof SessionWindowSpec) {
windowType = 3;
} else {
throw new RuntimeException("Not support window spec " + windowSpec);
}

if (windowing instanceof TimeAttributeWindowingStrategy) {
if (windowing.isRowtime()) {
rowtimeIndex = ((TimeAttributeWindowingStrategy) windowing).getTimeAttributeIndex();
}
windowType = 0;
} else if (windowing instanceof WindowAttachedWindowingStrategy) {
rowtimeIndex = ((WindowAttachedWindowingStrategy) windowing).getWindowEnd();
windowType = 1;
} else if (windowing instanceof SliceAttachedWindowingStrategy) {
rowtimeIndex = ((SliceAttachedWindowingStrategy) windowing).getSliceEnd();
} else {
Expand All @@ -79,4 +90,18 @@ public static Tuple5<Long, Long, Long, Integer, Integer> extractWindowParameters
return new Tuple5<Long, Long, Long, Integer, Integer>(
size, slide, offset, rowtimeIndex, windowType);
}

/**
 * Locates the positions of the window-start and window-end properties within the
 * output row type.
 *
 * @param props named window properties produced by the window aggregation
 * @param outputType row type of the operator output whose field names are searched
 * @return a tuple of (startIndex, endIndex); an index is -1 when the corresponding
 *     property is absent or its name does not appear among the output fields
 */
public static Tuple2<Integer, Integer> getWindowStartAndEndIndexes(
    NamedWindowProperty[] props, RowType outputType) {
  List<String> fieldNames = outputType.getFieldNames();
  int startIndex = -1;
  int endIndex = -1;
  for (NamedWindowProperty namedProperty : props) {
    // indexOf yields -1 for names missing from the output, preserving the sentinel.
    int position = fieldNames.indexOf(namedProperty.getName());
    if (namedProperty.getProperty() instanceof WindowStart) {
      startIndex = position;
    } else if (namedProperty.getProperty() instanceof WindowEnd) {
      endIndex = position;
    }
  }
  return new Tuple2<>(startIndex, endIndex);
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -31,6 +31,8 @@ public class VeloxQueryConfig {
private static final String keyVeloxAdjustTimestampToSessionTimeZone =
"adjust_timestamp_to_session_timezone";
private static final String keyVeloxSessionTimezone = "session_timezone";
private static final String kStreamingAggregationMinOutputBatchRows =
"streaming_aggregation_min_output_batch_rows";

public static Config getConfig(RuntimeContext context) {
if (!(context instanceof StreamingRuntimeContext)) {
Expand All @@ -47,6 +49,7 @@ public static Config getConfig(RuntimeContext context) {
} else {
configMap.put(keyVeloxSessionTimezone, localTimeZone);
}
configMap.put(kStreamingAggregationMinOutputBatchRows, String.valueOf(1));
return Config.create(configMap);
}
}
Loading