Skip to content

Commit 1b0b316

Browse files
committed
enabling the compiler plugin on modules, sparkifying data classes
1 parent c0a3140 commit 1b0b316

File tree

10 files changed

+40
-12
lines changed

10 files changed

+40
-12
lines changed

Diff for: build.gradle.kts

+1-1
Original file line numberDiff line numberDiff line change
@@ -23,7 +23,7 @@ plugins {
2323
buildconfig version Versions.buildconfig apply false
2424

2525
// Needs to be installed in the local maven repository
26-
id("org.jetbrains.kotlinx.spark.api") version Versions.project apply false
26+
kotlinSparkApi version Versions.kotlinSparkApiGradlePlugin apply false
2727
}
2828

2929
group = Versions.groupID

Diff for: buildSrc/src/main/kotlin/Plugins.kt

+2
Original file line numberDiff line numberDiff line change
@@ -2,6 +2,8 @@ import org.gradle.api.Project
22
import org.gradle.kotlin.dsl.*
33
import org.gradle.plugin.use.PluginDependenciesSpec
44

5+
inline val PluginDependenciesSpec.kotlinSparkApi
6+
get() = id("org.jetbrains.kotlinx.spark.api")
57

68
inline val PluginDependenciesSpec.kotlin
79
get() = kotlin("jvm")

Diff for: buildSrc/src/main/kotlin/Versions.kt

+1
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,6 @@
11
object Versions : Dsl<Versions> {
22
const val project = "2.0.0-SNAPSHOT"
3+
const val kotlinSparkApiGradlePlugin = "2.0.0-SNAPSHOT"
34
const val groupID = "org.jetbrains.kotlinx.spark"
45
const val kotlin = "2.0.0-Beta5"
56
const val jvmTarget = "8"

Diff for: kotlin-spark-api/build.gradle.kts

+3-1
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
1-
@file:Suppress("UnstableApiUsage", "NOTHING_TO_INLINE")
1+
@file:Suppress("UnstableApiUsage")
22

33
import com.igormaznitsa.jcp.gradle.JcpTask
44
import com.vanniktech.maven.publish.JavadocJar.Dokka
@@ -11,6 +11,7 @@ plugins {
1111
mavenPublishBase
1212
jcp
1313
idea
14+
kotlinSparkApi // for @Sparkify
1415
}
1516

1617
group = Versions.groupID
@@ -19,6 +20,7 @@ version = Versions.project
1920

2021
repositories {
2122
mavenCentral()
23+
mavenLocal()
2224
}
2325

2426
tasks.withType<Test>().configureEach {

Diff for: kotlin-spark-api/src/test/kotlin/org/jetbrains/kotlinx/spark/api/ApiTest.kt

+2
Original file line numberDiff line numberDiff line change
@@ -21,6 +21,7 @@ import ch.tutteli.atrium.api.fluent.en_GB.*
2121
import ch.tutteli.atrium.api.verbs.expect
2222
import io.kotest.core.spec.style.ShouldSpec
2323
import io.kotest.matchers.shouldBe
24+
import org.jetbrains.kotlinx.spark.api.plugin.annotations.Sparkify
2425
import scala.collection.Seq
2526
import java.io.Serializable
2627
import kotlin.collections.Iterator
@@ -165,4 +166,5 @@ class ApiTest : ShouldSpec({
165166

166167

167168
// (data) class must be Serializable to be broadcast
169+
@Sparkify
168170
data class SomeClass(val a: IntArray, val b: Int) : Serializable

Diff for: kotlin-spark-api/src/test/kotlin/org/jetbrains/kotlinx/spark/api/DatasetFunctionTest.kt

+4
Original file line numberDiff line numberDiff line change
@@ -33,6 +33,7 @@ import org.apache.spark.sql.functions.col
3333
import org.apache.spark.sql.streaming.GroupState
3434
import org.apache.spark.sql.streaming.GroupStateTimeout
3535
import org.jetbrains.kotlinx.spark.api.tuples.*
36+
import org.jetbrains.kotlinx.spark.api.plugin.annotations.Sparkify
3637
import scala.Tuple2
3738
import scala.Tuple3
3839
import scala.Tuple4
@@ -68,8 +69,10 @@ class DatasetFunctionTest : ShouldSpec({
6869
}
6970

7071
should("handle join operations") {
72+
@Sparkify
7173
data class Left(val id: Int, val name: String)
7274

75+
@Sparkify
7376
data class Right(val id: Int, val value: Int)
7477

7578
val first = dsOf(Left(1, "a"), Left(2, "b"))
@@ -453,4 +456,5 @@ class DatasetFunctionTest : ShouldSpec({
453456
}
454457
})
455458

459+
@Sparkify
456460
data class SomeOtherClass(val a: IntArray, val b: Int, val c: Boolean) : Serializable

Diff for: kotlin-spark-api/src/test/kotlin/org/jetbrains/kotlinx/spark/api/EncodingTest.kt

+10
Original file line numberDiff line numberDiff line change
@@ -28,6 +28,7 @@ import io.kotest.matchers.string.shouldContain
2828
import org.apache.spark.sql.Dataset
2929
import org.apache.spark.sql.types.Decimal
3030
import org.apache.spark.unsafe.types.CalendarInterval
31+
import org.jetbrains.kotlinx.spark.api.plugin.annotations.Sparkify
3132
import org.jetbrains.kotlinx.spark.api.tuples.*
3233
import scala.*
3334
import java.math.BigDecimal
@@ -600,7 +601,9 @@ class EncodingTest : ShouldSpec({
600601
}
601602

602603
should("handle strings converted to lists") {
604+
@Sparkify
603605
data class Movie(val id: Long, val genres: String)
606+
@Sparkify
604607
data class MovieExpanded(val id: Long, val genres: List<String>)
605608

606609
val comedies = listOf(Movie(1, "Comedy|Romance"), Movie(2, "Horror|Action")).toDS()
@@ -617,8 +620,10 @@ class EncodingTest : ShouldSpec({
617620

618621
should("handle strings converted to arrays") {
619622

623+
@Sparkify
620624
data class Movie(val id: Long, val genres: String)
621625

626+
@Sparkify
622627
data class MovieExpanded(val id: Long, val genres: Array<String>) {
623628
override fun equals(other: Any?): Boolean {
624629
if (this === other) return true
@@ -681,6 +686,7 @@ class EncodingTest : ShouldSpec({
681686
}
682687
})
683688

689+
@Sparkify
684690
data class IsSomethingClass(
685691
val enabled: Boolean,
686692
val isEnabled: Boolean,
@@ -690,14 +696,17 @@ data class IsSomethingClass(
690696
val getDouble: Double
691697
)
692698

699+
@Sparkify
693700
data class DataClassWithTuple<T : Product>(val tuple: T)
694701

702+
@Sparkify
695703
data class LonLat(val lon: Double, val lat: Double)
696704

697705
enum class SomeEnum { A, B }
698706

699707
enum class SomeOtherEnum(val value: Int) { C(1), D(2) }
700708

709+
@Sparkify
701710
data class ComplexEnumDataClass(
702711
val int: Int,
703712
val string: String,
@@ -711,6 +720,7 @@ data class ComplexEnumDataClass(
711720
val enumMap: Map<SomeEnum, SomeOtherEnum>,
712721
)
713722

723+
@Sparkify
714724
data class NullFieldAbleDataClass(
715725
val optionList: List<Int>?,
716726
val optionMap: Map<String, Int>?,

Diff for: kotlin-spark-api/src/test/kotlin/org/jetbrains/kotlinx/spark/api/TypeInferenceTest.kt

+6-4
Original file line numberDiff line numberDiff line change
@@ -23,6 +23,7 @@ import ch.tutteli.atrium.creating.Expect
2323
import io.kotest.core.spec.style.ShouldSpec
2424
import org.apache.spark.sql.types.ArrayType
2525
import org.apache.spark.sql.types.IntegerType
26+
import org.jetbrains.kotlinx.spark.api.plugin.annotations.Sparkify
2627
import org.jetbrains.kotlinx.spark.api.struct.model.DataType.StructType
2728
import org.jetbrains.kotlinx.spark.api.struct.model.DataType.TypeName
2829
import org.jetbrains.kotlinx.spark.api.struct.model.ElementType.ComplexElement
@@ -35,8 +36,8 @@ import kotlin.reflect.typeOf
3536
@OptIn(ExperimentalStdlibApi::class)
3637
class TypeInferenceTest : ShouldSpec({
3738
context("org.jetbrains.spark.api.org.jetbrains.spark.api.schema") {
38-
data class Test2<T>(val vala2: T, val para2: Pair<T, String>)
39-
data class Test<T>(val vala: T, val tripl1: Triple<T, Test2<Long>, T>)
39+
@Sparkify data class Test2<T>(val vala2: T, val para2: Pair<T, String>)
40+
@Sparkify data class Test<T>(val vala: T, val tripl1: Triple<T, Test2<Long>, T>)
4041

4142
val struct = Struct.fromJson(kotlinEncoderFor<Pair<String, Test<Int>>>().schema().prettyJson())!!
4243
should("contain correct typings") {
@@ -64,9 +65,10 @@ class TypeInferenceTest : ShouldSpec({
6465
}
6566
}
6667
context("org.jetbrains.spark.api.org.jetbrains.spark.api.schema with more complex data") {
67-
data class Single<T>(val vala3: T)
68+
@Sparkify data class Single<T>(val vala3: T)
69+
@Sparkify
6870
data class Test2<T>(val vala2: T, val para2: Pair<T, Single<Double>>)
69-
data class Test<T>(val vala: T, val tripl1: Triple<T, Test2<Long>, T>)
71+
@Sparkify data class Test<T>(val vala: T, val tripl1: Triple<T, Test2<Long>, T>)
7072

7173
val struct = Struct.fromJson(kotlinEncoderFor<Pair<String, Test<Int>>>().schema().prettyJson())!!
7274
should("contain correct typings") {

Diff for: kotlin-spark-api/src/test/kotlin/org/jetbrains/kotlinx/spark/api/UDFTest.kt

+4-2
Original file line numberDiff line numberDiff line change
@@ -33,6 +33,7 @@ import org.apache.spark.sql.Encoder
3333
import org.apache.spark.sql.Row
3434
import org.apache.spark.sql.expressions.Aggregator
3535
import org.intellij.lang.annotations.Language
36+
import org.jetbrains.kotlinx.spark.api.plugin.annotations.Sparkify
3637
import org.junit.jupiter.api.assertThrows
3738
import scala.collection.Seq
3839
import java.io.Serializable
@@ -1261,8 +1262,8 @@ class UDFTest : ShouldSpec({
12611262
}
12621263
})
12631264

1264-
data class Employee(val name: String, val salary: Long)
1265-
data class Average(var sum: Long, var count: Long)
1265+
@Sparkify data class Employee(val name: String, val salary: Long)
1266+
@Sparkify data class Average(var sum: Long, var count: Long)
12661267

12671268
private object MyAverage : Aggregator<Employee, Average, Double>() {
12681269
// A zero value for this aggregation. Should satisfy the property that any b + zero = b
@@ -1316,6 +1317,7 @@ private val aggregator = aggregatorOf<Long, Average, Double>(
13161317

13171318
private val addTwoConst = { x: Int, y: Int -> x + y }
13181319

1320+
@Sparkify
13191321
data class NormalClass(
13201322
val age: Int,
13211323
val name: String

Diff for: kotlin-spark-api/src/test/kotlin/org/jetbrains/kotlinx/spark/api/struct/model/models.kt

+7-4
Original file line numberDiff line numberDiff line change
@@ -23,6 +23,7 @@ import com.beust.klaxon.Converter
2323
import com.beust.klaxon.JsonObject
2424
import com.beust.klaxon.JsonValue
2525
import com.beust.klaxon.Klaxon
26+
import org.jetbrains.kotlinx.spark.api.plugin.annotations.Sparkify
2627

2728
private fun <T> Klaxon.convert(
2829
k: kotlin.reflect.KClass<*>,
@@ -43,6 +44,7 @@ private val klaxon = Klaxon()
4344
.convert(DataType::class, { DataType.fromJson(it) }, { it.toJson() }, true)
4445
.convert(ElementType::class, { ElementType.fromJson(it) }, { it.toJson() }, true)
4546

47+
@Sparkify
4648
data class Struct(
4749
val type: String,
4850
val fields: List<StructField>? = null,
@@ -56,6 +58,7 @@ data class Struct(
5658
}
5759
}
5860

61+
@Sparkify
5962
data class StructField(
6063
val name: String,
6164
val type: DataType,
@@ -66,8 +69,8 @@ data class StructField(
6669
typealias Metadata = JsonObject
6770

6871
sealed class DataType {
69-
data class StructType(val value: Struct) : DataType()
70-
data class TypeName(val value: String) : DataType()
72+
@Sparkify data class StructType(val value: Struct) : DataType()
73+
@Sparkify data class TypeName(val value: String) : DataType()
7174

7275
public fun toJson(): String = klaxon.toJsonString(when (this) {
7376
is StructType -> this.value
@@ -84,8 +87,8 @@ sealed class DataType {
8487
}
8588

8689
sealed class ElementType {
87-
data class SimpleElement(val value: String) : ElementType()
88-
data class ComplexElement(val value: Struct) : ElementType()
90+
@Sparkify data class SimpleElement(val value: String) : ElementType()
91+
@Sparkify data class ComplexElement(val value: Struct) : ElementType()
8992

9093
public fun toJson(): String = klaxon.toJsonString(when (this) {
9194
is SimpleElement -> this.value

0 commit comments

Comments (0)