[KYUUBI #6250][FOLLOWUP] Simplify code after dropping Spark 3.1

# 🔍 Description

As the title states, this simplifies code after dropping Spark 3.1 support.

## Types of changes 🔖

- [ ] Bugfix (non-breaking change which fixes an issue)
- [ ] New feature (non-breaking change which adds functionality)
- [ ] Breaking change (fix or feature that would cause existing functionality to change)

## Test Plan 🧪

Pass GHA.

---

# Checklist 📝

- [x] This patch was not authored or co-authored using [Generative Tooling](https://www.apache.org/legal/generative-tooling.html)

**Be nice. Be informative.**

Closes #6277 from pan3793/6273-followup.

Closes #6250

2b52de6c0 [Cheng Pan] [KYUUBI #6250][FOLLOWUP] Simplify code after dropping Spark 3.1

Authored-by: Cheng Pan <chengpan@apache.org>
Signed-off-by: Cheng Pan <chengpan@apache.org>
This commit is contained in:
Cheng Pan 2024-04-09 19:05:55 +08:00
parent a26156b8f4
commit 563d005846
No known key found for this signature in database
GPG Key ID: 8001952629BCC75D
2 changed files with 7 additions and 51 deletions

View File

@@ -21,7 +21,6 @@ import java.util.Collections
import scala.collection.JavaConverters._
import org.apache.spark.sql.kyuubi.SparkDataTypeHelper
import org.apache.spark.sql.types._
import org.apache.kyuubi.shaded.hive.service.rpc.thrift._
@@ -33,16 +32,6 @@ object SchemaHelper {
*/
final val TIMESTAMP_NTZ = "TimestampNTZType$"
/**
* Spark 3.2.0 DataType DayTimeIntervalType's class name.
*/
final val DAY_TIME_INTERVAL = "DayTimeIntervalType"
/**
* Spark 3.2.0 DataType YearMonthIntervalType's class name.
*/
final val YEAR_MONTH_INTERVAL = "YearMonthIntervalType"
def toTTypeId(typ: DataType): TTypeId = typ match {
case NullType => TTypeId.NULL_TYPE
case BooleanType => TTypeId.BOOLEAN_TYPE
@@ -59,15 +48,12 @@ object SchemaHelper {
case ntz if ntz.getClass.getSimpleName.equals(TIMESTAMP_NTZ) => TTypeId.TIMESTAMP_TYPE
case BinaryType => TTypeId.BINARY_TYPE
case CalendarIntervalType => TTypeId.STRING_TYPE
case dt if dt.getClass.getSimpleName.equals(DAY_TIME_INTERVAL) =>
TTypeId.INTERVAL_DAY_TIME_TYPE
case ym if ym.getClass.getSimpleName.equals(YEAR_MONTH_INTERVAL) =>
TTypeId.INTERVAL_YEAR_MONTH_TYPE
case _: DayTimeIntervalType => TTypeId.INTERVAL_DAY_TIME_TYPE
case _: YearMonthIntervalType => TTypeId.INTERVAL_YEAR_MONTH_TYPE
case _: ArrayType => TTypeId.ARRAY_TYPE
case _: MapType => TTypeId.MAP_TYPE
case _: StructType => TTypeId.STRUCT_TYPE
// SPARK-7768(fixed in 3.2.0) promoted UserDefinedType to DeveloperApi
case _ if SparkDataTypeHelper.isUserDefinedType(typ) => TTypeId.USER_DEFINED_TYPE
case _: UserDefinedType[_] => TTypeId.USER_DEFINED_TYPE
case other =>
throw new IllegalArgumentException(s"Unrecognized type name: ${other.catalogString}")
}
@@ -140,13 +126,12 @@ object SchemaHelper {
* For array, map, string, and binaries, the column size is variable, return null as unknown.
*/
def getColumnSize(sparkType: DataType): Option[Int] = sparkType match {
case dt
if Array(TIMESTAMP_NTZ, DAY_TIME_INTERVAL, YEAR_MONTH_INTERVAL)
.contains(dt.getClass.getSimpleName) => Some(dt.defaultSize)
case dt if dt.getClass.getSimpleName == TIMESTAMP_NTZ =>
Some(dt.defaultSize)
case dt: DecimalType =>
Some(dt.precision)
case dt @ (BooleanType | _: NumericType | DateType | TimestampType |
CalendarIntervalType | NullType) =>
case dt @ (BooleanType | _: NumericType | DateType | TimestampType | NullType |
CalendarIntervalType | _: DayTimeIntervalType | _: YearMonthIntervalType) =>
Some(dt.defaultSize)
case StructType(fields) =>
val sizeArr = fields.map(f => getColumnSize(f.dataType))

View File

@@ -1,29 +0,0 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.kyuubi
import org.apache.spark.sql.types.{DataType, UserDefinedType}
object SparkDataTypeHelper {
def isUserDefinedType(typ: DataType): Boolean = {
typ match {
case _: UserDefinedType[_] => true
case _ => false
}
}
}