Skip to content

Commit 3a7adaf

Browse files
committed
spark 3.2 -> spark 3.1
1 parent 5a1fa5a commit 3a7adaf

33 files changed

+73
-52
lines changed
Lines changed: 11 additions & 3 deletions
Original file line number · Diff line number · Diff line change
@@ -2,9 +2,9 @@
22
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
33
<modelVersion>4.0.0</modelVersion>
44

5-
<name>Kotlin Spark API: Scala core for Spark 3.2+ (Scala 2.12)</name>
6-
<description>Scala-Spark 3.2+ compatibility layer for Kotlin for Apache Spark</description>
7-
<artifactId>core-3.2_2.12</artifactId>
5+
<name>Kotlin Spark API: Scala core for Spark 3.1+ (Scala 2.12)</name>
6+
<description>Scala-Spark 3.1+ compatibility layer for Kotlin for Apache Spark</description>
7+
<artifactId>core-3.1_2.12</artifactId>
88
<parent>
99
<groupId>org.jetbrains.kotlinx.spark</groupId>
1010
<artifactId>kotlin-spark-api-parent_2.12</artifactId>
@@ -70,6 +70,14 @@
7070
<skip>true</skip>
7171
</configuration>
7272
</plugin>
73+
<plugin>
74+
<groupId>org.apache.maven.plugins</groupId>
75+
<artifactId>maven-compiler-plugin</artifactId>
76+
<configuration>
77+
<source>8</source>
78+
<target>8</target>
79+
</configuration>
80+
</plugin>
7381
</plugins>
7482
</build>
7583
</project>

core/3.2/src/main/scala/org/apache/spark/sql/KotlinReflection.scala renamed to core/3.1/src/main/scala/org/apache/spark/sql/KotlinReflection.scala

Lines changed: 10 additions & 11 deletions
Original file line number · Diff line number · Diff line change
@@ -49,6 +49,7 @@ import java.lang.Exception
4949
*/
5050
//noinspection RedundantBlock
5151
object KotlinReflection extends KotlinReflection {
52+
ScalaReflection
5253
/**
5354
* Returns the Spark SQL DataType for a given java class. Where this is not an exact mapping
5455
* to a native type, an ObjectType is returned.
@@ -290,13 +291,13 @@ object KotlinReflection extends KotlinReflection {
290291
createDeserializerForSqlTimestamp(path)
291292
}
292293
case t if isSubtype(t, localTypeOf[java.time.LocalDateTime]) => {
293-
createDeserializerForLocalDateTime(path)
294+
throw new IllegalArgumentException("java.time.LocalDateTime is supported in Spark 3.2+")
294295
}
295296
case t if isSubtype(t, localTypeOf[java.time.Duration]) => {
296-
createDeserializerForDuration(path)
297+
throw new IllegalArgumentException("java.time.Duration is supported in Spark 3.2+")
297298
}
298299
case t if isSubtype(t, localTypeOf[java.time.Period]) => {
299-
createDeserializerForPeriod(path)
300+
throw new IllegalArgumentException("java.time.Period is supported in Spark 3.2+")
300301
}
301302
case t if isSubtype(t, localTypeOf[java.lang.String]) => {
302303
createDeserializerForString(path, returnNullable = false)
@@ -828,7 +829,7 @@ object KotlinReflection extends KotlinReflection {
828829
createSerializerForSqlTimestamp(inputObject)
829830
}
830831
case t if isSubtype(t, localTypeOf[java.time.LocalDateTime]) => {
831-
createSerializerForLocalDateTime(inputObject)
832+
throw new IllegalArgumentException("java.time.LocalDateTime is supported in Spark 3.2+")
832833
}
833834
case t if isSubtype(t, localTypeOf[java.time.LocalDate]) => {
834835
createSerializerForJavaLocalDate(inputObject)
@@ -837,10 +838,10 @@ object KotlinReflection extends KotlinReflection {
837838
createSerializerForSqlDate(inputObject)
838839
}
839840
case t if isSubtype(t, localTypeOf[java.time.Duration]) => {
840-
createSerializerForJavaDuration(inputObject)
841+
throw new IllegalArgumentException("java.time.Duration is supported in Spark 3.2+")
841842
}
842843
case t if isSubtype(t, localTypeOf[java.time.Period]) => {
843-
createSerializerForJavaPeriod(inputObject)
844+
throw new IllegalArgumentException("java.time.Period is supported in Spark 3.2+")
844845
}
845846
case t if isSubtype(t, localTypeOf[BigDecimal]) => {
846847
createSerializerForScalaBigDecimal(inputObject)
@@ -1178,7 +1179,7 @@ object KotlinReflection extends KotlinReflection {
11781179
}
11791180
// SPARK-36227: Remove TimestampNTZ type support in Spark 3.2 with minimal code changes.
11801181
case t if isSubtype(t, localTypeOf[java.time.LocalDateTime]) && Utils.isTesting => {
1181-
Schema(TimestampNTZType, nullable = true)
1182+
throw new IllegalArgumentException("TimestampNTZType is supported in spark 3.2+")
11821183
}
11831184
case t if isSubtype(t, localTypeOf[java.time.LocalDate]) => {
11841185
Schema(DateType, nullable = true)
@@ -1190,10 +1191,10 @@ object KotlinReflection extends KotlinReflection {
11901191
Schema(CalendarIntervalType, nullable = true)
11911192
}
11921193
case t if isSubtype(t, localTypeOf[java.time.Duration]) => {
1193-
Schema(DayTimeIntervalType(), nullable = true)
1194+
throw new IllegalArgumentException("DayTimeIntervalType for java.time.Duration is supported in spark 3.2+")
11941195
}
11951196
case t if isSubtype(t, localTypeOf[java.time.Period]) => {
1196-
Schema(YearMonthIntervalType(), nullable = true)
1197+
throw new IllegalArgumentException("YearMonthIntervalType for java.time.Period is supported in spark 3.2+")
11971198
}
11981199
case t if isSubtype(t, localTypeOf[BigDecimal]) => {
11991200
Schema(DecimalType.SYSTEM_DEFAULT, nullable = true)
@@ -1268,8 +1269,6 @@ object KotlinReflection extends KotlinReflection {
12681269
@scala.annotation.tailrec
12691270
def javaBoxedType(dt: DataType): Class[_] = dt match {
12701271
case _: DecimalType => classOf[Decimal]
1271-
case _: DayTimeIntervalType => classOf[java.lang.Long]
1272-
case _: YearMonthIntervalType => classOf[java.lang.Integer]
12731272
case BinaryType => classOf[Array[Byte]]
12741273
case StringType => classOf[UTF8String]
12751274
case CalendarIntervalType => classOf[CalendarInterval]

core/3.2/src/main/scala/org/apache/spark/sql/KotlinWrappers.scala renamed to core/3.1/src/main/scala/org/apache/spark/sql/KotlinWrappers.scala

Lines changed: 1 addition & 1 deletion
Original file line number · Diff line number · Diff line change
@@ -77,7 +77,7 @@ class KDataTypeWrapper(
7777

7878
override private[ sql ] def getFieldIndex(name: String) = dt.getFieldIndex(name)
7979

80-
private[ sql ] def findNestedField(fieldNames: Seq[ String ], includeCollections: Boolean, resolver: Resolver) =
80+
override private[ sql ] def findNestedField(fieldNames: Seq[ String ], includeCollections: Boolean, resolver: Resolver) =
8181
dt.findNestedField(fieldNames, includeCollections, resolver)
8282

8383
override private[ sql ] def buildFormattedString(prefix: String, stringConcat: StringUtils.StringConcat, maxDepth: Int): Unit =

core/3.2/src/main/scala/org/apache/spark/sql/catalyst/CatalystTypeConverters.scala renamed to core/3.1/src/main/scala/org/apache/spark/sql/catalyst/CatalystTypeConverters.scala

File renamed without changes.

core/3.2/src/main/scala/org/jetbrains/kotlinx/spark/extensions/DemoCaseClass.scala renamed to core/3.1/src/main/scala/org/jetbrains/kotlinx/spark/extensions/DemoCaseClass.scala

File renamed without changes.

core/3.2/src/main/scala/org/jetbrains/kotlinx/spark/extensions/KSparkExtensions.scala renamed to core/3.1/src/main/scala/org/jetbrains/kotlinx/spark/extensions/KSparkExtensions.scala

File renamed without changes.

dummy/pom.xml

Lines changed: 1 addition & 1 deletion
Original file line number · Diff line number · Diff line change
@@ -16,7 +16,7 @@
1616
<dependencies>
1717
<dependency>
1818
<groupId>org.jetbrains.kotlinx.spark</groupId>
19-
<artifactId>examples-3.2_2.12</artifactId>
19+
<artifactId>examples-3.1_2.12</artifactId>
2020
<version>${project.parent.version}</version>
2121
</dependency>
2222
</dependencies>
Lines changed: 4 additions & 4 deletions
Original file line number · Diff line number · Diff line change
@@ -3,9 +3,9 @@
33

44
<modelVersion>4.0.0</modelVersion>
55

6-
<name>Kotlin Spark API: Examples for Spark 3.2+ (Scala 2.12)</name>
6+
<name>Kotlin Spark API: Examples for Spark 3.1+ (Scala 2.12)</name>
77
<description>Example of usage</description>
8-
<artifactId>examples-3.2_2.12</artifactId>
8+
<artifactId>examples-3.1_2.12</artifactId>
99
<parent>
1010
<groupId>org.jetbrains.kotlinx.spark</groupId>
1111
<artifactId>kotlin-spark-api-parent_2.12</artifactId>
@@ -16,7 +16,7 @@
1616
<dependencies>
1717
<dependency>
1818
<groupId>org.jetbrains.kotlinx.spark</groupId>
19-
<artifactId>kotlin-spark-api-3.2</artifactId>
19+
<artifactId>kotlin-spark-api-3.1</artifactId>
2020
<version>${project.version}</version>
2121
</dependency>
2222
<dependency>
@@ -39,7 +39,7 @@
3939
<build>
4040
<sourceDirectory>src/main/kotlin</sourceDirectory>
4141
<testSourceDirectory>src/test/kotlin</testSourceDirectory>
42-
<directory>target/3.2/${scala.compat.version}</directory>
42+
<directory>target/3.1/${scala.compat.version}</directory>
4343
<plugins>
4444
<plugin>
4545
<groupId>org.jetbrains.kotlin</groupId>
Lines changed: 4 additions & 4 deletions
Original file line number · Diff line number · Diff line change
@@ -3,9 +3,9 @@
33

44
<modelVersion>4.0.0</modelVersion>
55

6-
<name>Kotlin Spark API: API for Spark 3.2+ (Scala 2.12)</name>
7-
<artifactId>kotlin-spark-api-3.2</artifactId>
8-
<description>Kotlin API compatible with spark 3.2.0 Kotlin for Apache Spark</description>
6+
<name>Kotlin Spark API: API for Spark 3.1+ (Scala 2.12)</name>
7+
<artifactId>kotlin-spark-api-3.1</artifactId>
8+
<description>Kotlin API compatible with spark 3.1.3 Kotlin for Apache Spark</description>
99
<parent>
1010
<groupId>org.jetbrains.kotlinx.spark</groupId>
1111
<artifactId>kotlin-spark-api-parent_2.12</artifactId>
@@ -25,7 +25,7 @@
2525
</dependency>
2626
<dependency>
2727
<groupId>org.jetbrains.kotlinx.spark</groupId>
28-
<artifactId>core-3.2_${scala.compat.version}</artifactId>
28+
<artifactId>core-3.1_${scala.compat.version}</artifactId>
2929
</dependency>
3030
<dependency>
3131
<groupId>org.jetbrains.kotlinx.spark</groupId>

kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Arities.kt renamed to kotlin-spark-api/3.1/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Arities.kt

File renamed without changes.

0 commit comments

Comments (0)