Skip to content

Commit 66a42ac

Browse files
committed
updating tests
1 parent e234f40 commit 66a42ac

File tree

5 files changed

+667
-653
lines changed

5 files changed

+667
-653
lines changed

kotlin-spark-api/src/test/kotlin/org/jetbrains/kotlinx/spark/api/ApiTest.kt

+1-1
Original file line numberDiff line numberDiff line change
@@ -34,7 +34,7 @@ class ApiTest : ShouldSpec({
3434
withSpark(props = mapOf("spark.sql.codegen.comments" to true)) {
3535

3636
should("Create Seqs") {
37-
spark.createDataset(seqOf(1, 2, 3), encoder())
37+
spark.createDataset(seqOf(1, 2, 3), kotlinEncoderFor())
3838
.collectAsList() shouldBe listOf(1, 2, 3)
3939

4040

kotlin-spark-api/src/test/kotlin/org/jetbrains/kotlinx/spark/api/EncodingTest.kt

+13-14
Original file line numberDiff line numberDiff line change
@@ -27,7 +27,6 @@ import org.apache.spark.sql.Dataset
2727
import org.apache.spark.sql.types.Decimal
2828
import org.apache.spark.unsafe.types.CalendarInterval
2929
import org.jetbrains.kotlinx.spark.api.tuples.*
30-
import org.jetbrains.kotlinx.spark.extensions.DemoCaseClass
3130
import scala.*
3231
import java.math.BigDecimal
3332
import java.sql.Date
@@ -211,9 +210,9 @@ class EncodingTest : ShouldSpec({
211210

212211
should("handle Scala Case class datasets") {
213212
val caseClasses = listOf(
214-
DemoCaseClass(1, "1"),
215-
DemoCaseClass(2, "2"),
216-
DemoCaseClass(3, "3"),
213+
tupleOf(1, "1"),
214+
tupleOf(2, "2"),
215+
tupleOf(3, "3"),
217216
)
218217
val dataset = caseClasses.toDS()
219218
dataset.show()
@@ -222,9 +221,9 @@ class EncodingTest : ShouldSpec({
222221

223222
should("handle Scala Case class with data class datasets") {
224223
val caseClasses = listOf(
225-
DemoCaseClass(1, "1" to 1L),
226-
DemoCaseClass(2, "2" to 2L),
227-
DemoCaseClass(3, "3" to 3L),
224+
tupleOf(1, "1" to 1L),
225+
tupleOf(2, "2" to 2L),
226+
tupleOf(3, "3" to 3L),
228227
)
229228
val dataset = caseClasses.toDS()
230229
dataset.show()
@@ -233,9 +232,9 @@ class EncodingTest : ShouldSpec({
233232

234233
should("handle data class with Scala Case class datasets") {
235234
val caseClasses = listOf(
236-
1 to DemoCaseClass(1, "1"),
237-
2 to DemoCaseClass(2, "2"),
238-
3 to DemoCaseClass(3, "3"),
235+
1 to tupleOf(1, "1"),
236+
2 to tupleOf(2, "2"),
237+
3 to tupleOf(3, "3"),
239238
)
240239
val dataset = caseClasses.toDS()
241240
dataset.show()
@@ -244,9 +243,9 @@ class EncodingTest : ShouldSpec({
244243

245244
should("handle data class with Scala Case class & deeper datasets") {
246245
val caseClasses = listOf(
247-
1 to DemoCaseClass(1, "1" to DemoCaseClass(1, 1.0)),
248-
2 to DemoCaseClass(2, "2" to DemoCaseClass(2, 2.0)),
249-
3 to DemoCaseClass(3, "3" to DemoCaseClass(3, 3.0)),
246+
1 to tupleOf(1, "1" to tupleOf(1, 1.0)),
247+
2 to tupleOf(2, "2" to tupleOf(2, 2.0)),
248+
3 to tupleOf(3, "3" to tupleOf(3, 3.0)),
250249
)
251250
val dataset = caseClasses.toDS()
252251
dataset.show()
@@ -426,7 +425,7 @@ class EncodingTest : ShouldSpec({
426425
}
427426

428427
should("Generate schema correctly with nullalble list and map") {
429-
val schema = encoder<NullFieldAbleDataClass>().schema()
428+
val schema = kotlinEncoderFor<NullFieldAbleDataClass>().schema()
430429
schema.fields().forEach {
431430
it.nullable() shouldBe true
432431
}

kotlin-spark-api/src/test/kotlin/org/jetbrains/kotlinx/spark/api/StreamingTest.kt

+24-9
Original file line numberDiff line numberDiff line change
@@ -26,18 +26,15 @@ import io.kotest.matchers.collections.shouldContainAll
2626
import io.kotest.matchers.shouldBe
2727
import org.apache.commons.io.FileUtils
2828
import org.apache.hadoop.fs.FileSystem
29+
import org.apache.hadoop.fs.Path
2930
import org.apache.spark.SparkException
30-
import org.apache.spark.streaming.Checkpoint
3131
import org.apache.spark.streaming.Duration
3232
import org.apache.spark.streaming.Durations
3333
import org.apache.spark.streaming.Time
34-
import org.apache.spark.util.Utils
3534
import org.jetbrains.kotlinx.spark.api.tuples.X
3635
import org.jetbrains.kotlinx.spark.api.tuples.component1
3736
import org.jetbrains.kotlinx.spark.api.tuples.component2
3837
import org.jetbrains.kotlinx.spark.api.tuples.t
39-
import org.jetbrains.kotlinx.spark.extensions.KSparkExtensions
40-
import org.jetbrains.kotlinx.spark.extensions.`KSparkExtensions$`
4138
import scala.Tuple2
4239
import java.io.File
4340
import java.io.Serializable
@@ -202,18 +199,36 @@ class StreamingTest : ShouldSpec({
202199
})
203200

204201

205-
private val scalaCompatVersion = `KSparkExtensions$`.`MODULE$`.scalaCompatVersion()
206-
private val sparkVersion = `KSparkExtensions$`.`MODULE$`.sparkVersion()
207-
private fun createTempDir() = Utils.createTempDir(
202+
private val scalaCompatVersion = SCALA_COMPAT_VERSION
203+
private val sparkVersion = SPARK_VERSION
204+
private fun createTempDir() = File.createTempFile(
208205
System.getProperty("java.io.tmpdir"),
209206
"spark_${scalaCompatVersion}_${sparkVersion}"
210207
).apply { deleteOnExit() }
211208

209+
private fun checkpointFile(checkpointDir: String, checkpointTime: Time): Path {
210+
val klass = Class.forName("org.apache.spark.streaming.Checkpoint$")
211+
val moduleField = klass.getField("MODULE$").also { it.isAccessible = true }
212+
val module = moduleField.get(null)
213+
val checkpointFileMethod = klass.getMethod("checkpointFile", String::class.java, Time::class.java)
214+
.also { it.isAccessible = true }
215+
return checkpointFileMethod.invoke(module, checkpointDir, checkpointTime) as Path
216+
}
217+
218+
private fun getCheckpointFiles(checkpointDir: String, fs: scala.Option<FileSystem>): scala.collection.immutable.Seq<Path> {
219+
val klass = Class.forName("org.apache.spark.streaming.Checkpoint$")
220+
val moduleField = klass.getField("MODULE$").also { it.isAccessible = true }
221+
val module = moduleField.get(null)
222+
val getCheckpointFilesMethod = klass.getMethod("getCheckpointFiles", String::class.java, scala.Option::class.java)
223+
.also { it.isAccessible = true }
224+
return getCheckpointFilesMethod.invoke(module, checkpointDir, fs) as scala.collection.immutable.Seq<Path>
225+
}
226+
212227
private fun createCorruptedCheckpoint(): String {
213228
val checkpointDirectory = createTempDir().absolutePath
214-
val fakeCheckpointFile = Checkpoint.checkpointFile(checkpointDirectory, Time(1000))
229+
val fakeCheckpointFile = checkpointFile(checkpointDirectory, Time(1000))
215230
FileUtils.write(File(fakeCheckpointFile.toString()), "spark_corrupt_${scalaCompatVersion}_${sparkVersion}", StandardCharsets.UTF_8)
216-
assert(Checkpoint.getCheckpointFiles(checkpointDirectory, (null as FileSystem?).toOption()).nonEmpty())
231+
assert(getCheckpointFiles(checkpointDirectory, (null as FileSystem?).toOption()).nonEmpty())
217232
return checkpointDirectory
218233
}
219234

kotlin-spark-api/src/test/kotlin/org/jetbrains/kotlinx/spark/api/TypeInferenceTest.kt

+12-12
Original file line numberDiff line numberDiff line change
@@ -38,7 +38,7 @@ class TypeInferenceTest : ShouldSpec({
3838
data class Test2<T>(val vala2: T, val para2: Pair<T, String>)
3939
data class Test<T>(val vala: T, val tripl1: Triple<T, Test2<Long>, T>)
4040

41-
val struct = Struct.fromJson(schema(typeOf<Pair<String, Test<Int>>>()).prettyJson())!!
41+
val struct = Struct.fromJson(kotlinEncoderFor<Pair<String, Test<Int>>>().schema().prettyJson())!!
4242
should("contain correct typings") {
4343
expect(struct.fields).notToEqualNull().toContain.inAnyOrder.only.entries(
4444
hasField("first", "string"),
@@ -68,7 +68,7 @@ class TypeInferenceTest : ShouldSpec({
6868
data class Test2<T>(val vala2: T, val para2: Pair<T, Single<Double>>)
6969
data class Test<T>(val vala: T, val tripl1: Triple<T, Test2<Long>, T>)
7070

71-
val struct = Struct.fromJson(schema(typeOf<Pair<String, Test<Int>>>()).prettyJson())!!
71+
val struct = Struct.fromJson(kotlinEncoderFor<Pair<String, Test<Int>>>().schema().prettyJson())!!
7272
should("contain correct typings") {
7373
expect(struct.fields).notToEqualNull().toContain.inAnyOrder.only.entries(
7474
hasField("first", "string"),
@@ -99,7 +99,7 @@ class TypeInferenceTest : ShouldSpec({
9999
context("org.jetbrains.spark.api.org.jetbrains.spark.api.schema without generics") {
100100
data class Test(val a: String, val b: Int, val c: Double)
101101

102-
val struct = Struct.fromJson(schema(typeOf<Test>()).prettyJson())!!
102+
val struct = Struct.fromJson(kotlinEncoderFor<Test>().schema().prettyJson())!!
103103
should("return correct types too") {
104104
expect(struct.fields).notToEqualNull().toContain.inAnyOrder.only.entries(
105105
hasField("a", "string"),
@@ -109,7 +109,7 @@ class TypeInferenceTest : ShouldSpec({
109109
}
110110
}
111111
context("type with list of ints") {
112-
val struct = Struct.fromJson(schema(typeOf<List<Int>>()).prettyJson())!!
112+
val struct = Struct.fromJson(kotlinEncoderFor<List<Int>>().schema().prettyJson())!!
113113
should("return correct types too") {
114114
expect(struct) {
115115
isOfType("array")
@@ -118,7 +118,7 @@ class TypeInferenceTest : ShouldSpec({
118118
}
119119
}
120120
context("type with list of Pairs int to long") {
121-
val struct = Struct.fromJson(schema(typeOf<List<Pair<Int, Long>>>()).prettyJson())!!
121+
val struct = Struct.fromJson(kotlinEncoderFor<List<Pair<Int, Long>>>().schema().prettyJson())!!
122122
should("return correct types too") {
123123
expect(struct) {
124124
isOfType("array")
@@ -134,7 +134,7 @@ class TypeInferenceTest : ShouldSpec({
134134
context("type with list of generic data class with E generic name") {
135135
data class Test<E>(val e: E)
136136

137-
val struct = Struct.fromJson(schema(typeOf<List<Test<String>>>()).prettyJson())!!
137+
val struct = Struct.fromJson(kotlinEncoderFor<List<Test<String>>>().schema().prettyJson())!!
138138
should("return correct types too") {
139139
expect(struct) {
140140
isOfType("array")
@@ -147,7 +147,7 @@ class TypeInferenceTest : ShouldSpec({
147147
}
148148
}
149149
context("type with list of list of int") {
150-
val struct = Struct.fromJson(schema(typeOf<List<List<Int>>>()).prettyJson())!!
150+
val struct = Struct.fromJson(kotlinEncoderFor<List<List<Int>>>().schema().prettyJson())!!
151151
should("return correct types too") {
152152
expect(struct) {
153153
isOfType("array")
@@ -158,7 +158,7 @@ class TypeInferenceTest : ShouldSpec({
158158
}
159159
}
160160
context("Subtypes of list") {
161-
val struct = Struct.fromJson(schema(typeOf<ArrayList<Int>>()).prettyJson())!!
161+
val struct = Struct.fromJson(kotlinEncoderFor<ArrayList<Int>>().schema().prettyJson())!!
162162
should("return correct types too") {
163163
expect(struct) {
164164
isOfType("array")
@@ -168,7 +168,7 @@ class TypeInferenceTest : ShouldSpec({
168168
}
169169
}
170170
context("Subtypes of list with nullable values") {
171-
val struct = Struct.fromJson(schema(typeOf<ArrayList<Int?>>()).prettyJson())!!
171+
val struct = Struct.fromJson(kotlinEncoderFor<ArrayList<Int?>>().schema().prettyJson())!!
172172
should("return correct types too") {
173173
expect(struct) {
174174
isOfType("array")
@@ -180,7 +180,7 @@ class TypeInferenceTest : ShouldSpec({
180180
context("data class with props in order lon → lat") {
181181
data class Test(val lon: Double, val lat: Double)
182182

183-
val struct = Struct.fromJson(schema(typeOf<Test>()).prettyJson())!!
183+
val struct = Struct.fromJson(kotlinEncoderFor<Test>().schema().prettyJson())!!
184184
should("Not change order of fields") {
185185
expect(struct.fields).notToEqualNull().containsExactly(
186186
hasField("lon", "double"),
@@ -191,7 +191,7 @@ class TypeInferenceTest : ShouldSpec({
191191
context("data class with nullable list inside") {
192192
data class Sample(val optionList: List<Int>?)
193193

194-
val struct = Struct.fromJson(schema(typeOf<Sample>()).prettyJson())!!
194+
val struct = Struct.fromJson(kotlinEncoderFor<Sample>().schema().prettyJson())!!
195195

196196
should("show that list is nullable and element is not") {
197197
expect(struct)
@@ -213,7 +213,7 @@ class TypeInferenceTest : ShouldSpec({
213213
}
214214

215215
should("generate valid serializer schema") {
216-
expect(encoder<Sample>().schema()) {
216+
expect(kotlinEncoderFor<Sample>().schema()) {
217217
this
218218
.feature("data type", { this.fields()?.toList() }) {
219219
this.notToEqualNull().toContain.inOrder.only.entry {

0 commit comments

Comments
 (0)