Skip to content

Commit 396b634

Browse files
authored
Merge pull request #203 from Kotlin/1.2.4
1.2.4
2 parents bbde39a + ee9b8ea commit 396b634

File tree

10 files changed

+40
-34
lines changed

.github/workflows/build.yml

+1-1
Original file line numberDiff line numberDiff line change
@@ -12,7 +12,7 @@ jobs:
1212
strategy:
1313
matrix:
1414
scala: [ "2.12.17", "2.13.10" ]
15-
spark: [ "3.3.1", "3.3.0", "3.2.3", "3.2.2", "3.2.1", "3.2.0", "3.1.3", "3.1.2", "3.1.1", "3.1.0", "3.0.3", "3.0.2", "3.0.1", "3.0.0" ]
15+
spark: [ "3.3.2", "3.3.1", "3.3.0", "3.2.3", "3.2.2", "3.2.1", "3.2.0", "3.1.3", "3.1.2", "3.1.1", "3.1.0", "3.0.3", "3.0.2", "3.0.1", "3.0.0" ]
1616
exclude:
1717
- scala: "2.13.10"
1818
spark: "3.1.3"

.github/workflows/publish_dev_version.yml

+1-1
Original file line numberDiff line numberDiff line change
@@ -10,7 +10,7 @@ jobs:
1010
strategy:
1111
matrix:
1212
scala: [ "2.12.17", "2.13.10" ]
13-
spark: [ "3.3.1", "3.3.0", "3.2.3", "3.2.2", "3.2.1", "3.2.0", "3.1.3", "3.1.2", "3.1.1", "3.1.0", "3.0.3", "3.0.2", "3.0.1", "3.0.0" ]
13+
spark: [ "3.3.2", "3.3.1", "3.3.0", "3.2.3", "3.2.2", "3.2.1", "3.2.0", "3.1.3", "3.1.2", "3.1.1", "3.1.0", "3.0.3", "3.0.2", "3.0.1", "3.0.0" ]
1414
exclude:
1515
- scala: "2.13.10"
1616
spark: "3.1.3"

.github/workflows/publish_release_version.yml

+1-1
Original file line numberDiff line numberDiff line change
@@ -9,7 +9,7 @@ jobs:
99
strategy:
1010
matrix:
1111
scala: [ "2.12.17", "2.13.10" ]
12-
spark: [ "3.3.1", "3.3.0", "3.2.3", "3.2.2", "3.2.1", "3.2.0", "3.1.3", "3.1.2", "3.1.1", "3.1.0", "3.0.3", "3.0.2", "3.0.1", "3.0.0" ]
12+
spark: [ "3.3.2", "3.3.1", "3.3.0", "3.2.3", "3.2.2", "3.2.1", "3.2.0", "3.1.3", "3.1.2", "3.1.1", "3.1.0", "3.0.3", "3.0.2", "3.0.1", "3.0.0" ]
1313
exclude:
1414
- scala: "2.13.10"
1515
spark: "3.1.3"

README.md

+6-4
Original file line numberDiff line numberDiff line change
@@ -2,7 +2,7 @@
22

33
[![Kotlin Stable](https://kotl.in/badges/stable.svg)](https://kotlinlang.org/docs/components-stability.html)
44
[![JetBrains official project](http://jb.gg/badges/official.svg)](https://confluence.jetbrains.com/display/ALL/JetBrains+on+GitHub)
5-
[![Maven Central](https://img.shields.io/maven-central/v/org.jetbrains.kotlinx.spark/kotlin-spark-api_3.3.1_2.13.svg?label=Maven%20Central)](https://search.maven.org/search?q=g:"org.jetbrains.kotlinx.spark"%20AND%20a:"kotlin-spark-api_3.3.1_2.13")
5+
[![Maven Central](https://img.shields.io/maven-central/v/org.jetbrains.kotlinx.spark/kotlin-spark-api_3.3.2_2.13.svg?label=Maven%20Central)](https://search.maven.org/search?q=g:"org.jetbrains.kotlinx.spark"%20AND%20a:"kotlin-spark-api_3.3.2_2.13")
66
[![Join the chat at https://gitter.im/JetBrains/kotlin-spark-api](https://badges.gitter.im/JetBrains/kotlin-spark-api.svg)](https://gitter.im/JetBrains/kotlin-spark-api?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge)
77

88
Your next API to work with [Apache Spark](https://spark.apache.org/).
@@ -38,6 +38,8 @@ We have opened a Spark Project Improvement Proposal: [Kotlin support for Apache
3838

3939
| Apache Spark | Scala | Kotlin for Apache Spark |
4040
|:------------:|:-----:|:-----------------------------------:|
41+
| 3.3.2 | 2.13 | kotlin-spark-api_3.3.2_2.13:VERSION |
42+
| | 2.12 | kotlin-spark-api_3.3.2_2.12:VERSION |
4143
| 3.3.1 | 2.13 | kotlin-spark-api_3.3.1_2.13:VERSION |
4244
| | 2.12 | kotlin-spark-api_3.3.1_2.12:VERSION |
4345
| 3.3.0 | 2.13 | kotlin-spark-api_3.3.0_2.13:VERSION |
@@ -73,7 +75,7 @@ The Kotlin for Spark artifacts adhere to the following convention:
7375
The only exception to this is `scala-tuples-in-kotlin_[Scala core version]:[Kotlin for Apache Spark API version]`, which is
7476
independent of Spark.
7577

76-
[![Maven Central](https://img.shields.io/maven-central/v/org.jetbrains.kotlinx.spark/kotlin-spark-api_3.3.1_2.13.svg?label=Maven%20Central)](https://search.maven.org/search?q=g:"org.jetbrains.kotlinx.spark"%20AND%20a:"kotlin-spark-api_3.3.1_2.13")
78+
[![Maven Central](https://img.shields.io/maven-central/v/org.jetbrains.kotlinx.spark/kotlin-spark-api_3.3.2_2.13.svg?label=Maven%20Central)](https://search.maven.org/search?q=g:"org.jetbrains.kotlinx.spark"%20AND%20a:"kotlin-spark-api_3.3.2_2.13")
7779

7880
## How to configure Kotlin for Apache Spark in your project
7981

@@ -84,7 +86,7 @@ Here's an example `pom.xml`:
8486
```xml
8587
<dependency>
8688
<groupId>org.jetbrains.kotlinx.spark</groupId>
87-
<artifactId>kotlin-spark-api_3.3.1_2.13</artifactId>
89+
<artifactId>kotlin-spark-api_3.3.2_2.13</artifactId>
8890
<version>${kotlin-spark-api.version}</version>
8991
</dependency>
9092
<dependency>
@@ -117,7 +119,7 @@ To it, simply add
117119
to the top of your notebook. This will get the latest version of the API, together with the latest version of Spark.
118120
To define a certain version of Spark or the API itself, simply add it like this:
119121
```jupyterpython
120-
%use spark(spark=3.3.1, scala=2.13, v=1.2.2)
122+
%use spark(spark=3.3.2, scala=2.13, v=1.2.3)
121123
```
122124

123125
Inside the notebook a Spark session will be initiated automatically. This can be accessed via the `spark` value.

buildSrc/src/main/kotlin/Versions.kt

+6-6
Original file line numberDiff line numberDiff line change
@@ -1,25 +1,25 @@
11
object Versions {
2-
const val project = "1.2.3"
2+
const val project = "1.2.4"
33
const val groupID = "org.jetbrains.kotlinx.spark"
4-
const val kotlin = "1.8.0"
4+
const val kotlin = "1.8.20"
55
const val jvmTarget = "8"
6-
const val jupyterJvmTarget = "11"
6+
const val jupyterJvmTarget = "8"
77

88
inline val spark get() = System.getProperty("spark") as String
99
inline val scala get() = System.getProperty("scala") as String
1010
inline val sparkMinor get() = spark.substringBeforeLast('.')
1111
inline val scalaCompat get() = scala.substringBeforeLast('.')
1212

13-
const val jupyter = "0.11.0-210"
13+
const val jupyter = "0.12.0-32-1"
1414
const val kotest = "5.5.4"
1515
const val kotestTestContainers = "1.3.3"
16-
const val dokka = "1.7.10"
16+
const val dokka = "1.8.20"
1717
const val jcp = "7.0.5"
1818
const val mavenPublish = "0.20.0"
1919
const val atrium = "0.17.0"
2020
const val licenseGradlePluginVersion = "0.15.0"
2121
const val kafkaStreamsTestUtils = "3.1.0"
22-
const val hadoop = "3.3.1"
22+
const val hadoop = "3.3.6"
2323
const val kotlinxHtml = "0.7.5"
2424
const val klaxon = "5.5"
2525
const val jacksonDatabind = "2.13.4.2"

core/src/main/scala/org/apache/spark/sql/catalyst/CatalystTypeConverters.scala

+14-13
Original file line numberDiff line numberDiff line change
@@ -82,15 +82,15 @@ object CatalystTypeConverters {
8282
final def toCatalyst(@Nullable maybeScalaValue: Any): CatalystType = {
8383
if (maybeScalaValue == null) {
8484
null.asInstanceOf[CatalystType]
85-
} else if (maybeScalaValue.isInstanceOf[Option[ScalaInputType]]) {
86-
val opt = maybeScalaValue.asInstanceOf[Option[ScalaInputType]]
87-
if (opt.isDefined) {
88-
toCatalystImpl(opt.get)
89-
} else {
90-
null.asInstanceOf[CatalystType]
91-
}
92-
} else {
93-
toCatalystImpl(maybeScalaValue.asInstanceOf[ScalaInputType])
85+
} else maybeScalaValue match {
86+
case opt: Option[ScalaInputType] =>
87+
if (opt.isDefined) {
88+
toCatalystImpl(opt.get)
89+
} else {
90+
null.asInstanceOf[CatalystType]
91+
}
92+
case _ =>
93+
toCatalystImpl(maybeScalaValue.asInstanceOf[ScalaInputType])
9494
}
9595
}
9696

@@ -429,10 +429,11 @@ object CatalystTypeConverters {
429429
// a measurable performance impact. Note that this optimization will be unnecessary if we
430430
// use code generation to construct Scala Row -> Catalyst Row converters.
431431
def convert(maybeScalaValue: Any): Any = {
432-
if (maybeScalaValue.isInstanceOf[Option[Any]]) {
433-
maybeScalaValue.asInstanceOf[Option[Any]].orNull
434-
} else {
435-
maybeScalaValue
432+
maybeScalaValue match {
433+
case option: Option[Any] =>
434+
option.orNull
435+
case _ =>
436+
maybeScalaValue
436437
}
437438
}
438439

docs/quick-start-guide.md

+6-6
Original file line numberDiff line numberDiff line change
@@ -72,13 +72,13 @@ Here's what the `pom.xml` looks like for this example:
7272
</dependency>
7373
<dependency> <!-- Kotlin Spark API dependency -->
7474
<groupId>org.jetbrains.kotlinx.spark</groupId>
75-
<artifactId>kotlin-spark-api_3.3.1_2.13</artifactId>
75+
<artifactId>kotlin-spark-api_3.3.2_2.13</artifactId>
7676
<version>1.2.3</version>
7777
</dependency>
7878
<dependency> <!-- Spark dependency -->
7979
<groupId>org.apache.spark</groupId>
8080
<artifactId>spark-sql_2.12</artifactId>
81-
<version>3.3.1</version>
81+
<version>3.3.2</version>
8282
</dependency>
8383
</dependencies>
8484

@@ -163,8 +163,8 @@ dependencies {
163163
// Kotlin stdlib
164164
implementation 'org.jetbrains.kotlin:kotlin-stdlib:1.8.0'
165165
// Kotlin Spark API
166-
implementation 'org.jetbrains.kotlinx.spark:kotlin-spark-api_3.3.1_2.13:1.2.3' // Apache Spark
167-
compileOnly 'org.apache.spark:spark-sql_2.12:3.3.1'
166+
implementation 'org.jetbrains.kotlinx.spark:kotlin-spark-api_3.3.2_2.13:1.2.3' // Apache Spark
167+
compileOnly 'org.apache.spark:spark-sql_2.12:3.3.2'
168168
}
169169
170170
compileKotlin {
@@ -198,9 +198,9 @@ dependencies {
198198
// Kotlin stdlib
199199
implementation ("org.jetbrains.kotlin:kotlin-stdlib:1.4.0")
200200
// Kotlin Spark API
201-
implementation ("org.jetbrains.kotlinx.spark:kotlin-spark-api_3.3.1_2.13:1.2.3")
201+
implementation ("org.jetbrains.kotlinx.spark:kotlin-spark-api_3.3.2_2.13:1.2.3")
202202
// Apache Spark
203-
compileOnly ("org.apache.spark:spark-sql_2.12:3.3.1")
203+
compileOnly ("org.apache.spark:spark-sql_2.12:3.3.2")
204204
}
205205
206206
compileKotlin.kotlinOptions.jvmTarget = "1.8"

gradle.properties

+2-2
Original file line numberDiff line numberDiff line change
@@ -7,11 +7,11 @@ GROUP=org.jetbrains.kotlinx.spark
77

88
# Controls the spark and scala version for the entire project
99
# can also be defined like ./gradlew -Pspark=X.X.X -Pscala=X.X.X build
10-
spark=3.3.1
10+
spark=3.3.2
1111
scala=2.13.10
1212
# scala=2.12.17
1313
skipScalaTuplesInKotlin=false
1414

1515
org.gradle.caching=true
1616
org.gradle.parallel=false
17-
#kotlin.incremental.useClasspathSnapshot=true
17+
#kotlin.incremental.useClasspathSnapshot=true

jupyter/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/Integration.kt

+1
Original file line numberDiff line numberDiff line change
@@ -84,6 +84,7 @@ abstract class Integration(private val notebook: Notebook, private val options:
8484
"org.jetbrains.kotlin:kotlin-stdlib-jdk8:$kotlinVersion",
8585
"org.jetbrains.kotlin:kotlin-reflect:$kotlinVersion",
8686
"org.apache.spark:spark-sql_$scalaCompatVersion:$sparkVersion",
87+
"org.apache.spark:spark-yarn_$scalaCompatVersion:$sparkVersion",
8788
"org.apache.spark:spark-streaming_$scalaCompatVersion:$sparkVersion",
8889
"org.apache.spark:spark-mllib_$scalaCompatVersion:$sparkVersion",
8990
"org.apache.spark:spark-sql_$scalaCompatVersion:$sparkVersion",

jupyter/src/test/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/JupyterTests.kt

+2
Original file line numberDiff line numberDiff line change
@@ -59,6 +59,7 @@ class JupyterTests : ShouldSpec({
5959
librariesScanner.addLibrariesFromClassLoader(
6060
classLoader = currentClassLoader,
6161
host = this,
62+
notebook = notebook,
6263
integrationTypeNameRules = listOf(
6364
PatternNameAcceptanceRule(
6465
acceptsFlag = false,
@@ -341,6 +342,7 @@ class JupyterStreamingTests : ShouldSpec({
341342
librariesScanner.addLibrariesFromClassLoader(
342343
classLoader = currentClassLoader,
343344
host = this,
345+
notebook = notebook,
344346
integrationTypeNameRules = listOf(
345347
PatternNameAcceptanceRule(
346348
acceptsFlag = false,

Comments (0)