
Commit 7069a9a

fixing tests
1 parent b7c1711 commit 7069a9a

File tree

9 files changed: +493 -20 lines changed


buildSrc/src/main/kotlin/Versions.kt

Lines changed: 1 addition & 1 deletion
@@ -2,7 +2,7 @@ object Versions : Dsl<Versions> {
     const val project = "2.0.0-SNAPSHOT"
     const val kotlinSparkApiGradlePlugin = "2.0.0-SNAPSHOT"
     const val groupID = "org.jetbrains.kotlinx.spark"
-    // const val kotlin = "2.0.0-Beta5" todo issues with NonSerializable lambdas
+    // const val kotlin = "2.0.0-Beta5" // todo issues with NonSerializable lambdas
     const val kotlin = "1.9.23"
     const val jvmTarget = "8"
     const val jupyterJvmTarget = "8"

compiler-plugin/src/main/kotlin/org/jetbrains/kotlinx/spark/api/compilerPlugin/ir/DataClassPropertyAnnotationGenerator.kt

Lines changed: 8 additions & 3 deletions
@@ -9,6 +9,7 @@ import org.jetbrains.kotlin.ir.declarations.IrDeclaration
 import org.jetbrains.kotlin.ir.declarations.IrFile
 import org.jetbrains.kotlin.ir.declarations.IrModuleFragment
 import org.jetbrains.kotlin.ir.declarations.IrProperty
+import org.jetbrains.kotlin.ir.expressions.IrBlockBody
 import org.jetbrains.kotlin.ir.expressions.IrConst
 import org.jetbrains.kotlin.ir.expressions.impl.IrConstImpl
 import org.jetbrains.kotlin.ir.expressions.impl.IrConstructorCallImpl
@@ -40,9 +41,13 @@ class DataClassPropertyAnnotationGenerator(

     override fun visitElement(element: IrElement) {
         when (element) {
-            is IrDeclaration,
-            is IrFile,
-            is IrModuleFragment -> element.acceptChildrenVoid(this)
+//            is IrDeclaration,
+//            is IrFile,
+//            is IrBlockBody,
+//            is IrModuleFragment -> element.acceptChildrenVoid(this)
+
+            // test for now
+            else -> element.acceptChildrenVoid(this)
         }
     }

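The visitor no longer restricts recursion to IrDeclaration/IrFile/IrModuleFragment but descends into every element, so @Sparkify data classes declared inside function bodies are reached as well (see the new dataClassInFunctionTest below). A minimal standalone sketch of that traversal pattern, not the plugin's actual class, and with visitor signatures that may differ between compiler versions:

    import org.jetbrains.kotlin.ir.IrElement
    import org.jetbrains.kotlin.ir.declarations.IrClass
    import org.jetbrains.kotlin.ir.visitors.IrElementVisitorVoid
    import org.jetbrains.kotlin.ir.visitors.acceptChildrenVoid

    // Recurses unconditionally: bodies, expressions, and local declarations are all visited.
    class LocalClassCollector(private val onClass: (IrClass) -> Unit) : IrElementVisitorVoid {
        override fun visitElement(element: IrElement) {
            element.acceptChildrenVoid(this)
        }

        override fun visitClass(declaration: IrClass) {
            onClass(declaration) // e.g. annotate properties of @Sparkify classes here
            declaration.acceptChildrenVoid(this)
        }
    }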

compiler-plugin/src/test-gen/kotlin/org/jetbrains/kotlinx/spark/api/compilerPlugin/runners/BoxTestGenerated.java

Lines changed: 6 additions & 0 deletions
@@ -21,6 +21,12 @@ public void testAllFilesPresentInBox() {
         KtTestUtil.assertAllTestsPresentByMetadataWithExcluded(this.getClass(), new File("/mnt/data/Projects/kotlin-spark-api/compiler-plugin/src/test/resources/testData/box"), Pattern.compile("^(.+)\\.kt$"), null, TargetBackend.JVM_IR, true);
     }

+    @Test
+    @TestMetadata("dataClassInFunctionTest.kt")
+    public void testDataClassInFunctionTest() {
+        runTest("/mnt/data/Projects/kotlin-spark-api/compiler-plugin/src/test/resources/testData/box/dataClassInFunctionTest.kt");
+    }
+
     @Test
     @TestMetadata("dataClassTest.kt")
     public void testDataClassTest() {

compiler-plugin/src/test/resources/testData/box/dataClassInFunctionTest.fir.ir.txt

Lines changed: 374 additions & 0 deletions
Large diffs are not rendered by default.

Lines changed: 61 additions & 0 deletions
@@ -0,0 +1,61 @@
+FILE: dataClassInFunctionTest.kt
+    package foo.bar
+
+    public final annotation class Sparkify : R|kotlin/Annotation| {
+        public constructor(): R|foo/bar/Sparkify| {
+            super<R|kotlin/Any|>()
+        }
+
+    }
+    public final annotation class ColumnName : R|kotlin/Annotation| {
+        public constructor(name: R|kotlin/String|): R|foo/bar/ColumnName| {
+            super<R|kotlin/Any|>()
+        }
+
+        public final val name: R|kotlin/String| = R|<local>/name|
+            public get(): R|kotlin/String|
+
+    }
+    public final fun box(): R|kotlin/String| {
+        @R|foo/bar/Sparkify|() local final data class User : R|kotlin/Any| {
+            public constructor(name: R|kotlin/String| = String(John Doe), age: R|kotlin/Int| = Int(25), @R|foo/bar/ColumnName|(name = String(a)) test: R|kotlin/Double| = Double(1.0), test2: R|kotlin/Double| = Double(2.0)): R|<local>/User| {
+                super<R|kotlin/Any|>()
+            }
+
+            public final val name: R|kotlin/String| = R|<local>/name|
+                public get(): R|kotlin/String|
+
+            public final val age: R|kotlin/Int| = R|<local>/age|
+                public get(): R|kotlin/Int|
+
+            public final val test: R|kotlin/Double| = R|<local>/test|
+                public get(): R|kotlin/Double|
+
+            public final val test2: R|kotlin/Double| = R|<local>/test2|
+                @PROPERTY_GETTER:R|foo/bar/ColumnName|(name = String(b)) public get(): R|kotlin/Double|
+
+            public final operator fun component1(): R|kotlin/String|
+
+            public final operator fun component2(): R|kotlin/Int|
+
+            public final operator fun component3(): R|kotlin/Double|
+
+            public final operator fun component4(): R|kotlin/Double|
+
+            public final fun copy(name: R|kotlin/String| = this@R|<local>/User|.R|<local>/name|, age: R|kotlin/Int| = this@R|<local>/User|.R|<local>/age|, @R|foo/bar/ColumnName|(name = String(a)) test: R|kotlin/Double| = this@R|<local>/User|.R|<local>/test|, test2: R|kotlin/Double| = this@R|<local>/User|.R|<local>/test2|): R|<local>/User|
+
+        }
+
+        lval user: R|<local>/User| = R|<local>/User.User|()
+        lval name: R|kotlin/Any!| = <getClass>(Q|<local>/User|).R|kotlin/jvm/java|<R|<local>/User|>.R|SubstitutionOverride<java/lang/Class.getMethod: R|java/lang/reflect/Method!|>|(String(name)).R|java/lang/reflect/Method.invoke|(R|<local>/user|)
+        lval age: R|kotlin/Any!| = <getClass>(Q|<local>/User|).R|kotlin/jvm/java|<R|<local>/User|>.R|SubstitutionOverride<java/lang/Class.getMethod: R|java/lang/reflect/Method!|>|(String(age)).R|java/lang/reflect/Method.invoke|(R|<local>/user|)
+        lval a: R|kotlin/Any!| = <getClass>(Q|<local>/User|).R|kotlin/jvm/java|<R|<local>/User|>.R|SubstitutionOverride<java/lang/Class.getMethod: R|java/lang/reflect/Method!|>|(String(a)).R|java/lang/reflect/Method.invoke|(R|<local>/user|)
+        lval b: R|kotlin/Any!| = <getClass>(Q|<local>/User|).R|kotlin/jvm/java|<R|<local>/User|>.R|SubstitutionOverride<java/lang/Class.getMethod: R|java/lang/reflect/Method!|>|(String(b)).R|java/lang/reflect/Method.invoke|(R|<local>/user|)
+        when () {
+            !=(R|<local>/name|, String(John Doe)) || !=(R|<local>/age|, Int(25)) || !=(R|<local>/a|, Double(1.0)) || !=(R|<local>/b|, Double(2.0)) -> {
+                ^box String(Could not invoke functions name(), age(), a(), or b() from Java)
+            }
+        }
+
+        ^box String(OK)
+    }

compiler-plugin/src/test/resources/testData/box/dataClassInFunctionTest.kt

Lines changed: 26 additions & 0 deletions
@@ -0,0 +1,26 @@
+package foo.bar
+
+annotation class Sparkify
+annotation class ColumnName(val name: String)
+
+fun box(): String {
+
+    @Sparkify
+    data class User(
+        val name: String = "John Doe",
+        val age: Int = 25,
+        @ColumnName("a") val test: Double = 1.0,
+        @get:ColumnName("b") val test2: Double = 2.0,
+    )
+
+    val user = User()
+    val name = User::class.java.getMethod("name").invoke(user)
+    val age = User::class.java.getMethod("age").invoke(user)
+    val a = User::class.java.getMethod("a").invoke(user)
+    val b = User::class.java.getMethod("b").invoke(user)
+
+    if (name != "John Doe" || age != 25 || a != 1.0 || b != 2.0) {
+        return "Could not invoke functions name(), age(), a(), or b() from Java"
+    }
+    return "OK"
+}
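The box test resolves name(), age(), a() and b() through Java reflection, so it only passes if the plugin adds Java-style accessors for the @Sparkify class and applies the @ColumnName renames. A hand-written stand-in for what the test expects to exist after the plugin runs (a sketch, not the actual generated IR; the underscore-prefixed names are illustrative only):

    class UserLike(
        private val _name: String = "John Doe",
        private val _age: Int = 25,
        private val _test: Double = 1.0,
        private val _test2: Double = 2.0,
    ) {
        // Accessors the test looks up via User::class.java.getMethod(...).invoke(user)
        fun name(): String = _name
        fun age(): Int = _age
        fun a(): Double = _test     // renamed by @ColumnName("a")
        fun b(): Double = _test2    // renamed by @get:ColumnName("b")
    }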

kotlin-spark-api/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Dataset.kt

Lines changed: 2 additions & 2 deletions
@@ -358,11 +358,11 @@ fun <T1, T2> Dataset<Tuple2<T1, T2>>.sortByValue(): Dataset<Tuple2<T1, T2>> = so

 /** Returns a dataset sorted by the first (`first`) value of each [Pair] inside. */
 @JvmName("sortByPairKey")
-fun <T1, T2> Dataset<Pair<T1, T2>>.sortByKey(): Dataset<Pair<T1, T2>> = sort("first")
+fun <T1, T2> Dataset<Pair<T1, T2>>.sortByKey(): Dataset<Pair<T1, T2>> = sort("getFirst")

 /** Returns a dataset sorted by the second (`second`) value of each [Pair] inside. */
 @JvmName("sortByPairValue")
-fun <T1, T2> Dataset<Pair<T1, T2>>.sortByValue(): Dataset<Pair<T1, T2>> = sort("second")
+fun <T1, T2> Dataset<Pair<T1, T2>>.sortByValue(): Dataset<Pair<T1, T2>> = sort("getSecond")

 /**
  * This function creates block, where one can call any further computations on already cached dataset
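The sort columns move from the Kotlin property names ("first"/"second") to the Java getter names ("getFirst"/"getSecond"), which matches how the updated encoder appears to expose kotlin.Pair. A minimal usage sketch of the two functions, assuming the usual kotlin-spark-api entry points (withSpark, dsOf):

    import org.jetbrains.kotlinx.spark.api.*

    fun main() = withSpark {
        // Dataset<Pair<Int, String>>; sortByKey/sortByValue now sort on the
        // "getFirst"/"getSecond" columns of the Pair encoder.
        val ds = dsOf(3 to "c", 1 to "a", 2 to "b")
        ds.sortByKey().show()
        ds.sortByValue().show()
    }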

kotlin-spark-api/src/test/kotlin/org/jetbrains/kotlinx/spark/api/DatasetFunctionTest.kt

Lines changed: 3 additions & 2 deletions
@@ -77,6 +77,8 @@ class DatasetFunctionTest : ShouldSpec({

         val first = dsOf(Left(1, "a"), Left(2, "b"))
         val second = dsOf(Right(1, 100), Right(3, 300))
+        first.show()
+        second.show()
         val result = first
             .leftJoin(second, first.col("id") eq second.col("id"))
             .map { it._1.id X it._1.name X it._2?.value }
@@ -211,8 +213,7 @@ class DatasetFunctionTest : ShouldSpec({
             s = key
             s shouldBe key

-            if (collected.size > 1) collected.iterator()
-            else emptyList<Tuple2<Int, String>>().iterator()
+            if (collected.size > 1) collected else emptyList()
         }

         flatMappedWithState.count() shouldBe 2
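For context, leftJoin here yields a Tuple2 whose second element is nullable for unmatched keys, which is what the test maps over. A rough standalone sketch of that pattern, using plain data classes where the real test uses @Sparkify-annotated ones:

    import org.jetbrains.kotlinx.spark.api.*

    data class LeftRow(val id: Int, val name: String)
    data class RightRow(val id: Int, val value: Int)

    fun leftJoinDemo() = withSpark {
        val first = dsOf(LeftRow(1, "a"), LeftRow(2, "b"))
        val second = dsOf(RightRow(1, 100), RightRow(3, 300))
        first
            .leftJoin(second, first.col("id") eq second.col("id"))
            // Tuple2<LeftRow, RightRow?>: the right side is null where no id matches
            .map { it._1.name to it._2?.value } // ("a", 100), ("b", null)
            .show()
    }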

kotlin-spark-api/src/test/kotlin/org/jetbrains/kotlinx/spark/api/TypeInferenceTest.kt

Lines changed: 12 additions & 12 deletions
@@ -39,7 +39,7 @@ class TypeInferenceTest : ShouldSpec({
         @Sparkify data class Test2<T>(val vala2: T, val para2: Pair<T, String>)
         @Sparkify data class Test<T>(val vala: T, val tripl1: Triple<T, Test2<Long>, T>)

-        val struct = Struct.fromJson(kotlinEncoderFor<Pair<String, Test<Int>>>().schema().prettyJson())!!
+        val struct = Struct.fromJson(schemaFor<Pair<String, Test<Int>>>().prettyJson())!!
         should("contain correct typings") {
             expect(struct.fields).notToEqualNull().toContain.inAnyOrder.only.entries(
                 hasField("first", "string"),
@@ -70,7 +70,7 @@ class TypeInferenceTest : ShouldSpec({
         data class Test2<T>(val vala2: T, val para2: Pair<T, Single<Double>>)
         @Sparkify data class Test<T>(val vala: T, val tripl1: Triple<T, Test2<Long>, T>)

-        val struct = Struct.fromJson(kotlinEncoderFor<Pair<String, Test<Int>>>().schema().prettyJson())!!
+        val struct = Struct.fromJson(schemaFor<Pair<String, Test<Int>>>().prettyJson())!!
         should("contain correct typings") {
             expect(struct.fields).notToEqualNull().toContain.inAnyOrder.only.entries(
                 hasField("first", "string"),
@@ -101,7 +101,7 @@ class TypeInferenceTest : ShouldSpec({
     context("org.jetbrains.spark.api.org.jetbrains.spark.api.schema without generics") {
         data class Test(val a: String, val b: Int, val c: Double)

-        val struct = Struct.fromJson(kotlinEncoderFor<Test>().schema().prettyJson())!!
+        val struct = Struct.fromJson(schemaFor<Test>().prettyJson())!!
         should("return correct types too") {
             expect(struct.fields).notToEqualNull().toContain.inAnyOrder.only.entries(
                 hasField("a", "string"),
@@ -111,7 +111,7 @@ class TypeInferenceTest : ShouldSpec({
         }
     }
     context("type with list of ints") {
-        val struct = Struct.fromJson(kotlinEncoderFor<List<Int>>().schema().prettyJson())!!
+        val struct = Struct.fromJson(schemaFor<List<Int>>().prettyJson())!!
         should("return correct types too") {
             expect(struct) {
                 isOfType("array")
@@ -120,7 +120,7 @@ class TypeInferenceTest : ShouldSpec({
         }
     }
     context("type with list of Pairs int to long") {
-        val struct = Struct.fromJson(kotlinEncoderFor<List<Pair<Int, Long>>>().schema().prettyJson())!!
+        val struct = Struct.fromJson(schemaFor<List<Pair<Int, Long>>>().prettyJson())!!
         should("return correct types too") {
             expect(struct) {
                 isOfType("array")
@@ -136,7 +136,7 @@ class TypeInferenceTest : ShouldSpec({
     context("type with list of generic data class with E generic name") {
         data class Test<E>(val e: E)

-        val struct = Struct.fromJson(kotlinEncoderFor<List<Test<String>>>().schema().prettyJson())!!
+        val struct = Struct.fromJson(schemaFor<List<Test<String>>>().prettyJson())!!
         should("return correct types too") {
             expect(struct) {
                 isOfType("array")
@@ -149,7 +149,7 @@ class TypeInferenceTest : ShouldSpec({
         }
     }
     context("type with list of list of int") {
-        val struct = Struct.fromJson(kotlinEncoderFor<List<List<Int>>>().schema().prettyJson())!!
+        val struct = Struct.fromJson(schemaFor<List<List<Int>>>().prettyJson())!!
         should("return correct types too") {
             expect(struct) {
                 isOfType("array")
@@ -160,7 +160,7 @@ class TypeInferenceTest : ShouldSpec({
         }
     }
     context("Subtypes of list") {
-        val struct = Struct.fromJson(kotlinEncoderFor<ArrayList<Int>>().schema().prettyJson())!!
+        val struct = Struct.fromJson(schemaFor<ArrayList<Int>>().prettyJson())!!
         should("return correct types too") {
             expect(struct) {
                 isOfType("array")
@@ -170,7 +170,7 @@ class TypeInferenceTest : ShouldSpec({
         }
     }
     context("Subtypes of list with nullable values") {
-        val struct = Struct.fromJson(kotlinEncoderFor<ArrayList<Int?>>().schema().prettyJson())!!
+        val struct = Struct.fromJson(schemaFor<ArrayList<Int?>>().prettyJson())!!
         should("return correct types too") {
             expect(struct) {
                 isOfType("array")
@@ -182,7 +182,7 @@ class TypeInferenceTest : ShouldSpec({
     context("data class with props in order lon → lat") {
         data class Test(val lon: Double, val lat: Double)

-        val struct = Struct.fromJson(kotlinEncoderFor<Test>().schema().prettyJson())!!
+        val struct = Struct.fromJson(schemaFor<Test>().prettyJson())!!
         should("Not change order of fields") {
             expect(struct.fields).notToEqualNull().containsExactly(
                 hasField("lon", "double"),
@@ -193,7 +193,7 @@ class TypeInferenceTest : ShouldSpec({
     context("data class with nullable list inside") {
         data class Sample(val optionList: List<Int>?)

-        val struct = Struct.fromJson(kotlinEncoderFor<Sample>().schema().prettyJson())!!
+        val struct = Struct.fromJson(schemaFor<Sample>().prettyJson())!!

         should("show that list is nullable and element is not") {
             expect(struct)
@@ -215,7 +215,7 @@ class TypeInferenceTest : ShouldSpec({
         }

         should("generate valid serializer schema") {
-            expect(kotlinEncoderFor<Sample>().schema()) {
+            expect(schemaFor<Sample>() as org.apache.spark.sql.types.StructType) {
                 this
                     .feature("data type", { this.fields()?.toList() }) {
                         this.notToEqualNull().toContain.inOrder.only.entry {
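All schema assertions now go through schemaFor<T>() instead of building an encoder first with kotlinEncoderFor<T>().schema(). A minimal sketch of that usage; the import path is assumed to be org.jetbrains.kotlinx.spark.api, matching the rest of the library:

    import org.jetbrains.kotlinx.spark.api.schemaFor
    import org.apache.spark.sql.types.StructType

    fun printPairSchema() {
        // schemaFor<T>() returns a Spark DataType directly, without constructing an encoder.
        val dataType = schemaFor<Pair<String, Int>>()
        println(dataType.prettyJson())

        // For struct-like types it can be downcast to StructType, as the last hunk does.
        val struct = dataType as StructType
        println(struct.fields().joinToString { it.name() })
    }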
