
Commit

feat: Add Comet windows function support. fmt
comphead committed Mar 12, 2024
1 parent 068c958 commit 8595587
Showing 4 changed files with 48 additions and 25 deletions.
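
For context, this commit is the formatting pass over Comet's window function support. A minimal window query of the kind the new CometWindowExec operator targets is sketched below; it uses only stock Spark APIs, and whether Comet actually picks it up depends on session configuration not shown anywhere in this commit.

    import org.apache.spark.sql.SparkSession
    import org.apache.spark.sql.expressions.Window
    import org.apache.spark.sql.functions.{rank, sum}

    // A window query of the kind CometWindowExec is meant to execute natively.
    // Enabling Comet itself takes extra session configuration not shown here.
    object WindowDemo {
      def main(args: Array[String]): Unit = {
        val spark = SparkSession.builder().master("local[*]").appName("window-demo").getOrCreate()
        import spark.implicits._

        val df = Seq(("a", 1), ("a", 3), ("b", 2)).toDF("k", "v")
        val w = Window.partitionBy($"k").orderBy($"v")

        df.select(
            $"k",
            $"v",
            rank().over(w).as("rnk"),         // ranking window function
            sum($"v").over(w).as("running"))  // running aggregate over the same frame
          .show()

        spark.stop()
      }
    }
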
@@ -20,12 +20,12 @@
package org.apache.comet

import java.nio.ByteOrder

import org.apache.spark.SparkConf
import org.apache.spark.internal.Logging
import org.apache.spark.network.util.ByteUnit
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.SparkSessionExtensions
import org.apache.spark.sql.catalyst.expressions.AttributeReference
import org.apache.spark.sql.catalyst.rules.Rule
import org.apache.spark.sql.comet._
import org.apache.spark.sql.comet.execution.shuffle.{CometColumnarShuffle, CometNativeShuffle}
@@ -37,15 +37,16 @@ import org.apache.spark.sql.execution.datasources.parquet.ParquetFileFormat
import org.apache.spark.sql.execution.datasources.v2.BatchScanExec
import org.apache.spark.sql.execution.datasources.v2.parquet.ParquetScan
import org.apache.spark.sql.execution.exchange.{BroadcastExchangeExec, ShuffleExchangeExec}
import org.apache.spark.sql.execution.window.WindowExec
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.types._

import org.apache.comet.CometConf._
import org.apache.comet.CometSparkSessionExtensions.{isANSIEnabled, isCometBroadCastEnabled, isCometColumnarShuffleEnabled, isCometEnabled, isCometExecEnabled, isCometOperatorEnabled, isCometScan, isCometScanEnabled, isCometShuffleEnabled, isSchemaSupported}
import org.apache.comet.parquet.{CometParquetScan, SupportsComet}
import org.apache.comet.serde.OperatorOuterClass.Operator
import org.apache.comet.serde.QueryPlanSerde
import org.apache.comet.shims.ShimCometSparkSessionExtensions
import org.apache.spark.sql.execution.window.WindowExec

class CometSparkSessionExtensions
extends (SparkSessionExtensions => Unit)
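
The WindowExec import added above is what lets the extension's planner rule match Spark's window operator. The rule body itself sits outside this hunk; purely as an illustration, a replacement rule built from the CometWindowExec API shown later in this diff could look like the hypothetical sketch below.

    import org.apache.spark.sql.catalyst.rules.Rule
    import org.apache.spark.sql.comet.CometWindowExec
    import org.apache.spark.sql.execution.SparkPlan
    import org.apache.spark.sql.execution.window.WindowExec

    // Hypothetical rule, for illustration only: swap Spark's WindowExec for the
    // Comet operator whenever the whole window plan serializes to a native one.
    case class ReplaceWindowSketch() extends Rule[SparkPlan] {
      override def apply(plan: SparkPlan): SparkPlan = plan.transformUp {
        case w: WindowExec
            if CometWindowExec
              .getNativePlan(w.output, w.windowExpression, w.partitionSpec, w.orderSpec, w.child)
              .isDefined =>
          CometWindowExec(w, w.windowExpression, w.partitionSpec, w.orderSpec, w.child)
      }
    }
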
12 changes: 7 additions & 5 deletions spark/src/main/scala/org/apache/comet/serde/QueryPlanSerde.scala
@@ -20,6 +20,7 @@
package org.apache.comet.serde

import scala.collection.JavaConverters._

import org.apache.spark.internal.Logging
import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.catalyst.expressions.aggregate.{AggregateExpression, Average, Count, Final, First, Last, Max, Min, Partial, Sum}
@@ -32,15 +33,16 @@ import org.apache.spark.sql.execution
import org.apache.spark.sql.execution._
import org.apache.spark.sql.execution.aggregate.HashAggregateExec
import org.apache.spark.sql.execution.exchange.{BroadcastExchangeExec, ShuffleExchangeExec}
import org.apache.spark.sql.execution.window.WindowExec
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.types._
import org.apache.spark.unsafe.types.UTF8String

import org.apache.comet.CometSparkSessionExtensions.{isCometOperatorEnabled, isCometScan, isSpark32, isSpark34Plus}
import org.apache.comet.serde.ExprOuterClass.{AggExpr, Expr, ScalarFunc, DataType => ProtoDataType}
import org.apache.comet.serde.ExprOuterClass.{AggExpr, DataType => ProtoDataType, Expr, ScalarFunc}
import org.apache.comet.serde.ExprOuterClass.DataType.{DataTypeInfo, DecimalInfo, ListInfo, MapInfo, StructInfo}
import org.apache.comet.serde.OperatorOuterClass.{Operator, AggregateMode => CometAggregateMode}
import org.apache.comet.serde.OperatorOuterClass.{AggregateMode => CometAggregateMode, Operator}
import org.apache.comet.shims.ShimQueryPlanSerde
import org.apache.spark.sql.execution.window.WindowExec

/**
* A utility object for query plan and expression serialization.
@@ -186,8 +188,8 @@ object QueryPlanSerde extends Logging with ShimQueryPlanSerde {
}

def windowExprToProto(
windowExpr: WindowExpression,
inputs: Seq[Attribute]): Option[OperatorOuterClass.WindowExpr] = {
windowExpr: WindowExpression,
inputs: Seq[Attribute]): Option[OperatorOuterClass.WindowExpr] = {
val func = exprToProto(windowExpr.windowFunction, inputs).getOrElse(return None)

val f = windowExpr.windowSpec.frameSpecification
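
One idiom in the hunk above deserves a note: exprToProto(...).getOrElse(return None) aborts serialization the moment any sub-expression has no native counterpart, using Scala's non-local return. A self-contained illustration with toy names (none of this is Comet code):

    object EarlyExitSketch {
      // Toy stand-in for exprToProto: succeeds only for non-negative inputs.
      def toProtoLike(x: Int): Option[String] =
        if (x >= 0) Some(x.toString) else None

      // Serialize every element, or bail out with None on the first failure.
      // The `return None` inside the lambda exits serializeAll itself.
      def serializeAll(xs: Seq[Int]): Option[Seq[String]] = {
        val out = xs.map(x => toProtoLike(x).getOrElse(return None))
        Some(out)
      }

      def main(args: Array[String]): Unit = {
        println(serializeAll(Seq(1, 2, 3))) // Some(List(1, 2, 3))
        println(serializeAll(Seq(1, -2)))   // None
      }
    }
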
@@ -1,10 +1,26 @@
package org.apache.spark.sql.comet
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

import org.apache.comet.serde.OperatorOuterClass
import org.apache.comet.serde.OperatorOuterClass.Operator
import org.apache.comet.serde.QueryPlanSerde.{exprToProto, serializeDataType, windowExprToProto}
package org.apache.spark.sql.comet

import scala.collection.JavaConverters.asJavaIterableConverter

import org.apache.spark.rdd.RDD
import org.apache.spark.sql.catalyst.expressions.{Alias, Attribute, Expression, NamedExpression, SortOrder, WindowExpression}
import org.apache.spark.sql.catalyst.plans.physical.Partitioning
@@ -13,6 +29,10 @@ import org.apache.spark.sql.execution.{SparkPlan, UnaryExecNode}
import org.apache.spark.sql.execution.metric.{SQLMetrics, SQLShuffleReadMetricsReporter, SQLShuffleWriteMetricsReporter}
import org.apache.spark.sql.vectorized.ColumnarBatch

import org.apache.comet.serde.OperatorOuterClass
import org.apache.comet.serde.OperatorOuterClass.Operator
import org.apache.comet.serde.QueryPlanSerde.{exprToProto, serializeDataType, windowExprToProto}

/**
* Comet physical plan node for Spark `WindowExec`.
*
@@ -21,12 +41,12 @@
* executions separated by a Comet shuffle exchange.
*/
case class CometWindowExec(
override val originalPlan: SparkPlan,
windowExpression: Seq[NamedExpression],
partitionSpec: Seq[Expression],
orderSpec: Seq[SortOrder],
child: SparkPlan)
extends CometExec
override val originalPlan: SparkPlan,
windowExpression: Seq[NamedExpression],
partitionSpec: Seq[Expression],
orderSpec: Seq[SortOrder],
child: SparkPlan)
extends CometExec
with UnaryExecNode {

override def nodeName: String = "CometWindowExec"
@@ -68,11 +88,11 @@ case class CometWindowExec(

object CometWindowExec {
def getNativePlan(
outputAttributes: Seq[Attribute],
windowExpression: Seq[NamedExpression],
partitionSpec: Seq[Expression],
orderSpec: Seq[SortOrder],
child: SparkPlan): Option[Operator] = {
outputAttributes: Seq[Attribute],
windowExpression: Seq[NamedExpression],
partitionSpec: Seq[Expression],
orderSpec: Seq[SortOrder],
child: SparkPlan): Option[Operator] = {

val orderSpecs = orderSpec.map(exprToProto(_, child.output))
val partitionSpecs = partitionSpec.map(exprToProto(_, child.output))
@@ -110,4 +130,4 @@ object CometWindowExec {
Some(opBuilder.setWindow(windowBuilder).build())
} else None
}
}
}
4 changes: 2 additions & 2 deletions spark/src/test/scala/org/apache/comet/CometCastSuite.scala
@@ -90,13 +90,13 @@ class CometCastSuite extends CometTestBase with AdaptiveSparkPlanHelper {
Range(0, len).map(_ => chars.charAt(r.nextInt(chars.length))).mkString
}

private def fuzzCastFromString(chars: String, maxLen: Int, toType: DataType) {
private def fuzzCastFromString(chars: String, maxLen: Int, toType: DataType): Unit = {
val r = new Random(0)
val inputs = Range(0, 10000).map(_ => genString(r, chars, maxLen))
castTest(inputs.toDF("a"), toType)
}

private def castTest(input: DataFrame, toType: DataType) {
private def castTest(input: DataFrame, toType: DataType): Unit = {
withTempPath { dir =>
val df = roundtripParquet(input, dir)
.withColumn("converted", col("a").cast(toType))
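
Both test-suite changes above do the same thing: they drop Scala's procedure syntax in favor of an explicit `: Unit =` result type. Procedure syntax triggers a deprecation warning under Scala 2.13 and is gone entirely in Scala 3, so the new form is the portable one. A minimal before/after, independent of the suite:

    object ProcedureSyntaxSketch {
      // Old style, as the suite had it (procedure syntax, no result type):
      //   def greet(name: String) { println(s"hi $name") }

      // New style, as the commit adopts: explicit Unit result type.
      def greet(name: String): Unit = {
        println(s"hi $name")
      }

      def main(args: Array[String]): Unit = greet("comet")
    }
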
