@@ -22,6 +22,8 @@ package org.jetbrains.kotlinx.spark.api
 import ch.tutteli.atrium.api.fluent.en_GB.*
 import ch.tutteli.atrium.api.verbs.expect
 import io.kotest.core.spec.style.ShouldSpec
+import io.kotest.matchers.collections.shouldContain
+import io.kotest.matchers.collections.shouldContainExactly
 import io.kotest.matchers.shouldBe
 import org.apache.spark.sql.Dataset
 import org.apache.spark.sql.types.Decimal
@@ -208,6 +210,39 @@ class EncodingTest : ShouldSpec({
     context("schema") {
         withSpark(props = mapOf("spark.sql.codegen.comments" to true)) {

+            context("Give proper names to columns of data classes") {
+                val old = KotlinTypeInference.DO_NAME_HACK
+                KotlinTypeInference.DO_NAME_HACK = true
+
+                should("Be able to serialize pairs") {
+                    val pairs = listOf(
+                        1 to "1",
+                        2 to "2",
+                        3 to "3",
+                    )
+                    val dataset = pairs.toDS()
+                    dataset.show()
+                    dataset.collectAsList() shouldBe pairs
+                    dataset.columns().shouldContainExactly("first", "second")
+                }
+
+                should("Be able to serialize pairs of pairs") {
+                    val pairs = listOf(
+                        1 to (1 to "1"),
+                        2 to (2 to "2"),
+                        3 to (3 to "3"),
+                    )
+                    val dataset = pairs.toDS()
+                    dataset.show()
+                    dataset.printSchema()
+                    dataset.columns().shouldContainExactly("first", "second")
+                    dataset.select("second.*").columns().shouldContainExactly("first", "second")
+                    dataset.collectAsList() shouldBe pairs
+                }
+
+                KotlinTypeInference.DO_NAME_HACK = old
+            }
+
             should("handle Scala Case class datasets") {
                 val caseClasses = listOf(
                     tupleOf(1, "1"),
@@ -253,14 +288,14 @@ class EncodingTest : ShouldSpec({
             }


-            xshould("handle Scala Option datasets") {
+            should("handle Scala Option datasets") {
                 val caseClasses = listOf(Some(1), Some(2), Some(3))
                 val dataset = caseClasses.toDS()
                 dataset.show()
                 dataset.collectAsList() shouldBe caseClasses
             }

-            xshould("handle Scala Option Option datasets") {
+            should("handle Scala Option Option datasets") {
                 val caseClasses = listOf(
                     Some(Some(1)),
                     Some(Some(2)),
@@ -270,7 +305,7 @@ class EncodingTest : ShouldSpec({
                 dataset.collectAsList() shouldBe caseClasses
             }

-            xshould("handle data class Scala Option datasets") {
+            should("handle data class Scala Option datasets") {
                 val caseClasses = listOf(
                     Some(1) to Some(2),
                     Some(3) to Some(4),
@@ -280,7 +315,7 @@ class EncodingTest : ShouldSpec({
                 dataset.collectAsList() shouldBe caseClasses
             }

-            xshould("handle Scala Option data class datasets") {
+            should("handle Scala Option data class datasets") {
                 val caseClasses = listOf(
                     Some(1 to 2),
                     Some(3 to 4),
@@ -501,7 +536,7 @@ class EncodingTest : ShouldSpec({
                 expect(result).toContain.inOrder.only.values(5.1 to 6)
             }

-            should("! handle primitive arrays") {
+            should("handle boxed arrays") {
                 val result = listOf(arrayOf(1, 2, 3, 4))
                     .toDS()
                     .map { it.map { ai -> ai + 1 } }
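
For context, a minimal usage sketch of what the tests above exercise, written as hypothetical consumer code rather than part of the change: it assumes the kotlin-spark-api `withSpark`/`toDS` helpers already used in this test file and the `KotlinTypeInference.DO_NAME_HACK` switch toggled in the new context block. With the flag enabled, the assertions above indicate that an encoded Kotlin Pair exposes its components as columns named "first" and "second", so they can be selected by name.

import org.jetbrains.kotlinx.spark.api.*
import scala.Some

fun main() = withSpark {
    // Assumption: same flag as in the tests above; enabling it gives Pair
    // columns their Kotlin property names ("first", "second").
    KotlinTypeInference.DO_NAME_HACK = true

    val ds = listOf(1 to "one", 2 to "two", 3 to "three").toDS()
    ds.printSchema()            // expected columns: first, second
    ds.select("second").show()  // address the String component by name

    // The re-enabled tests above suggest scala.Some values round-trip as well.
    val opts = listOf(Some(1), Some(2), Some(3)).toDS()
    opts.show()
}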