add ut TypeDescriptorSuite

This commit is contained in:
Kent Yao 2018-03-22 21:33:17 +08:00
parent 2970a88f03
commit e855d0f666
4 changed files with 58 additions and 6 deletions

View File

@@ -21,7 +21,7 @@ import org.apache.hive.service.cli.thrift.{TPrimitiveTypeEntry, TTypeDesc, TType
import org.apache.spark.sql.types.{DataType, DecimalType}
class TypeDescriptor(typ: DataType) {
private val typeQualifiers: Option[TypeQualifiers] = typ match {
private[this] val typeQualifiers: Option[TypeQualifiers] = typ match {
case d: DecimalType => Some(TypeQualifiers.fromTypeInfo(d))
case _ => None
}

View File

@@ -149,7 +149,7 @@ object ReflectUtils extends Logging {
}
}
def getObject(o: Any, name: String): Any = {
def getFieldValue(o: Any, name: String): Any = {
Try {
val field = o.getClass.getDeclaredField(name)
field.setAccessible(true)

View File

@@ -0,0 +1,52 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package yaooqinn.kyuubi.schema
import org.apache.hive.service.cli.thrift.{TCLIServiceConstants, TTypeId}
import org.apache.spark.SparkFunSuite
import org.apache.spark.sql.types.{ByteType, DecimalType, NullType}
import yaooqinn.kyuubi.utils.ReflectUtils
/**
 * Unit tests for [[TypeDescriptor]]: verifies that the thrift `TTypeDesc`
 * generated from a Spark SQL [[DataType]] carries exactly one type entry,
 * that decimal types expose precision/scale through type qualifiers, and
 * that non-decimal types produce no qualifiers.
 */
class TypeDescriptorSuite extends SparkFunSuite {

  test("TypeDescriptor basic tests") {
    // DecimalType(precision = 10, scale = 9) must surface its precision via
    // the PRECISION qualifier on the single primitive type entry.
    val typeDescriptor = new TypeDescriptor(new DecimalType(10, 9))
    val tTypeDesc = typeDescriptor.toTTypeDesc
    assert(tTypeDesc.getTypesSize === 1)
    assert(
      tTypeDesc
        .getTypes.get(0)
        .getPrimitiveEntry
        .getTypeQualifiers
        .getQualifiers
        .get(TCLIServiceConstants.PRECISION).getI32Value === 10)

    // A non-decimal type (ByteType) maps to TINYINT with no qualifiers set.
    val typeDescriptor2 = new TypeDescriptor(ByteType)
    val tTypeDesc2 = typeDescriptor2.toTTypeDesc
    assert(tTypeDesc2.getTypesSize === 1)
    assert(tTypeDesc2.getTypes.get(0).getPrimitiveEntry.getTypeQualifiers === null)
    assert(tTypeDesc2.getTypes.get(0).getPrimitiveEntry.getType === TTypeId.TINYINT_TYPE)

    // The private field is declared as Option[TypeQualifiers]; the original
    // cast to Option[TypeDescriptor] was wrong and only passed because the
    // Option element type is erased at runtime.
    assert(ReflectUtils.getFieldValue(typeDescriptor, "typeQualifiers")
      .asInstanceOf[Option[TypeQualifiers]].isDefined)
    assert(ReflectUtils.getFieldValue(typeDescriptor2, "typeQualifiers")
      .asInstanceOf[Option[TypeQualifiers]].isEmpty)
  }
}

View File

@@ -29,10 +29,10 @@ class TypeQualifiersSuite extends SparkFunSuite {
val typeQualifiers1 = TypeQualifiers.fromTypeInfo(new DecimalType(10, 9))
val typeQualifiers2 = TypeQualifiers.fromTypeInfo(BooleanType)
assert(ReflectUtils.getObject(typeQualifiers1, "precision") === Some(10))
assert(ReflectUtils.getObject(typeQualifiers1, "scale") === Some(9))
assert(ReflectUtils.getObject(typeQualifiers2, "precision") === None)
assert(ReflectUtils.getObject(typeQualifiers2, "scale") === None)
assert(ReflectUtils.getFieldValue(typeQualifiers1, "precision") === Some(10))
assert(ReflectUtils.getFieldValue(typeQualifiers1, "scale") === Some(9))
assert(ReflectUtils.getFieldValue(typeQualifiers2, "precision") === None)
assert(ReflectUtils.getFieldValue(typeQualifiers2, "scale") === None)
assert(typeQualifiers1.toTTypeQualifiers
.getQualifiers.get(TCLIServiceConstants.PRECISION).getI32Value === 10)