diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml
index f770318f..c585a26f 100644
--- a/.github/workflows/build.yml
+++ b/.github/workflows/build.yml
@@ -11,25 +11,8 @@ jobs:
     timeout-minutes: 30
     strategy:
       matrix:
-        scala: [ "2.12.17", "2.13.10" ]
-        spark: [ "3.3.2", "3.3.1", "3.3.0", "3.2.3", "3.2.2", "3.2.1", "3.2.0", "3.1.3", "3.1.2", "3.1.1", "3.1.0", "3.0.3", "3.0.2", "3.0.1", "3.0.0" ]
-        exclude:
-          - scala: "2.13.10"
-            spark: "3.1.3"
-          - scala: "2.13.10"
-            spark: "3.1.2"
-          - scala: "2.13.10"
-            spark: "3.1.1"
-          - scala: "2.13.10"
-            spark: "3.1.0"
-          - scala: "2.13.10"
-            spark: "3.0.3"
-          - scala: "2.13.10"
-            spark: "3.0.2"
-          - scala: "2.13.10"
-            spark: "3.0.1"
-          - scala: "2.13.10"
-            spark: "3.0.0"
+        scala: [ "2.12.19", "2.13.13" ]
+        spark: [ "3.4.2", "3.5.1" ]
     runs-on: ubuntu-latest
 
     steps:
@@ -49,7 +32,7 @@ jobs:
             ~/.gradle/caches
             ~/.gradle/wrapper
             ~/.gradle/jdks
-          key: ${{ runner.os }}-gradle-spark-${{ matrix.spark }}-${{ matrix.scala }}
+          key: "${{ runner.os }}-gradle-spark-${{ matrix.spark }}-${{ matrix.scala }}"
           restore-keys: |
             ${{ runner.os }}-gradle-
 
diff --git a/.github/workflows/generate_docs.yml b/.github/workflows/generate_docs.yml
index 40863b7b..48d2517a 100644
--- a/.github/workflows/generate_docs.yml
+++ b/.github/workflows/generate_docs.yml
@@ -26,7 +26,7 @@ jobs:
             ~/.gradle/caches
             ~/.gradle/wrapper
             ~/.gradle/jdks
-          key: ${{ runner.os }}-gradle-spark-${{ matrix.spark }}-${{ matrix.scala }}
+          key: "${{ runner.os }}-gradle-spark-${{ matrix.spark }}-${{ matrix.scala }}"
           restore-keys: |
             ${{ runner.os }}-gradle-
 
diff --git a/.github/workflows/publish_dev_version.yml b/.github/workflows/publish_dev_version.yml
index 4e2ab716..d1ca7e37 100644
--- a/.github/workflows/publish_dev_version.yml
+++ b/.github/workflows/publish_dev_version.yml
@@ -9,25 +9,8 @@ jobs:
   build-and-deploy:
     strategy:
       matrix:
-        scala: [ "2.12.17", "2.13.10" ]
-        spark: [ "3.3.2", "3.3.1", "3.3.0", "3.2.3", "3.2.2", "3.2.1", "3.2.0", "3.1.3", "3.1.2", "3.1.1", "3.1.0", "3.0.3", "3.0.2", "3.0.1", "3.0.0" ]
-        exclude:
-          - scala: "2.13.10"
-            spark: "3.1.3"
-          - scala: "2.13.10"
-            spark: "3.1.2"
-          - scala: "2.13.10"
-            spark: "3.1.1"
-          - scala: "2.13.10"
-            spark: "3.1.0"
-          - scala: "2.13.10"
-            spark: "3.0.3"
-          - scala: "2.13.10"
-            spark: "3.0.2"
-          - scala: "2.13.10"
-            spark: "3.0.1"
-          - scala: "2.13.10"
-            spark: "3.0.0"
+        scala: [ "2.12.19", "2.13.13" ]
+        spark: [ "3.4.2", "3.5.1" ]
     runs-on: ubuntu-latest
     permissions:
       contents: read
@@ -50,7 +33,7 @@ jobs:
             ~/.gradle/caches
             ~/.gradle/wrapper
             ~/.gradle/jdks
-          key: ${{ runner.os }}-gradle-spark-${{ matrix.spark }}-${{ matrix.scala }}
+          key: "${{ runner.os }}-gradle-spark-${{ matrix.spark }}-${{ matrix.scala }}"
           restore-keys: |
             ${{ runner.os }}-gradle-
 
@@ -72,7 +55,7 @@ jobs:
           ./gradlew 
           -Pspark=${{ matrix.spark }} 
           -Pscala=${{ matrix.scala }} 
-          -PskipScalaTuplesInKotlin=${{ !(matrix.spark == '3.0.0' || matrix.scala == '2.13.10' && matrix.spark == '3.2.0') }} 
+          -PskipScalaOnlyDependent=${{ matrix.spark != '3.4.2' }} 
           clean 
           publishMavenPublicationToGitHubPackagesRepository 
           --scan
diff --git a/.github/workflows/publish_release_version.yml b/.github/workflows/publish_release_version.yml
index ea1998ed..f4605999 100644
--- a/.github/workflows/publish_release_version.yml
+++ b/.github/workflows/publish_release_version.yml
@@ -8,25 +8,8 @@ jobs:
   build-and-deploy-mvn-central:
     strategy:
       matrix:
-        scala: [ "2.12.17", "2.13.10" ]
-        spark: [ "3.3.2", "3.3.1", "3.3.0", "3.2.3", "3.2.2", "3.2.1", "3.2.0", "3.1.3", "3.1.2", "3.1.1", "3.1.0", "3.0.3", "3.0.2", "3.0.1", "3.0.0" ]
-        exclude:
-          - scala: "2.13.10"
-            spark: "3.1.3"
-          - scala: "2.13.10"
-            spark: "3.1.2"
-          - scala: "2.13.10"
-            spark: "3.1.1"
-          - scala: "2.13.10"
-            spark: "3.1.0"
-          - scala: "2.13.10"
-            spark: "3.0.3"
-          - scala: "2.13.10"
-            spark: "3.0.2"
-          - scala: "2.13.10"
-            spark: "3.0.1"
-          - scala: "2.13.10"
-            spark: "3.0.0"
+        scala: [ "2.12.19", "2.13.13" ]
+        spark: [ "3.4.2", "3.5.1" ]
     runs-on: ubuntu-latest
     permissions:
       contents: read
@@ -49,7 +32,7 @@ jobs:
             ~/.gradle/caches
             ~/.gradle/wrapper
             ~/.gradle/jdks
-          key: ${{ runner.os }}-gradle-spark-${{ matrix.spark }}-${{ matrix.scala }}
+          key: "${{ runner.os }}-gradle-spark-${{ matrix.spark }}-${{ matrix.scala }}"
           restore-keys: |
             ${{ runner.os }}-gradle-
 
@@ -74,7 +57,7 @@ jobs:
           ./gradlew 
           -Pspark=${{ matrix.spark }} 
           -Pscala=${{ matrix.scala }} 
-          -PskipScalaTuplesInKotlin=${{ !(matrix.spark == '3.0.0' || matrix.scala == '2.13.10' && matrix.spark == '3.2.0') }} 
+          -PskipScalaOnlyDependent=${{ matrix.spark != '3.4.2' }} 
           clean 
           publishMavenPublicationToMavenCentralRepository
           --scan
diff --git a/build.gradle.kts b/build.gradle.kts
index 178bb7a0..d60dcf4b 100644
--- a/build.gradle.kts
+++ b/build.gradle.kts
@@ -1,5 +1,11 @@
 @file:Suppress("UnstableApiUsage")
 
+import Projects.compilerPlugin
+import Projects.gradlePlugin
+import com.github.gmazzo.buildconfig.BuildConfigExtension
+import org.jetbrains.kotlin.gradle.tasks.KotlinCompile
+
+
 buildscript {
     repositories {
         mavenCentral()
@@ -7,15 +13,19 @@ buildscript {
     dependencies {
         classpath(jcp)
         classpath(mavenPublish)
+
+        // Allows the project to use the gradle plugin without mavenLocal
+        // Kept up-to-date by :gradle-plugin:updateBootstrapVersion
+        classpath(files("${project.rootDir.absolutePath}/gradle/bootstraps/gradle-plugin.jar"))
     }
 }
 
-
 plugins {
     mavenPublish version Versions.mavenPublish
     dokka version Versions.dokka
     idea
     kotlin version Versions.kotlin apply false
+    buildconfig version Versions.buildconfig apply false
 }
 
 group = Versions.groupID
@@ -114,4 +124,55 @@ allprojects {
             }
         }
     }
+}
+
+subprojects {
+    afterEvaluate {
+        // Adding the bootstraps directory to the repositories of the subprojects, so that
+        // the bootstrap version of compiler-plugin.jar can be found and used by the gradle-plugin
+        // without mavenLocal
+        if (plugins.hasPlugin("org.jetbrains.kotlinx.spark.api")) {
+            repositories.flatDir {
+                dirs("${project.rootDir.absolutePath}/gradle/bootstraps")
+            }
+            tasks.withType<KotlinCompile> {
+                dependsOn(":compiler-plugin:updateBootstrapVersion")
+                dependsOn(":gradle-plugin:updateBootstrapVersion")
+            }
+        }
+
+        repositories.flatDir {
+            dirs("${project.rootDir.absolutePath}/gradle/bootstraps")
+        }
+        extensions.findByType<BuildConfigExtension>()?.apply {
+            val projectVersion = Versions.project
+            val groupId = Versions.groupID
+
+            val compilerPluginArtifactId = compilerPlugin.name
+            val gradlePluginArtifactId = gradlePlugin.name
+
+            val compilerPluginId = "$groupId.api"
+
+            val defaultSparkifyFqName = "$groupId.api.plugin.annotations.Sparkify"
+            val defaultColumnNameFqName = "$groupId.api.plugin.annotations.ColumnName"
+
+            val projectRoot = project.rootDir.absolutePath
+
+            packageName("$groupId.api")
+            className("Artifacts")
+
+            buildConfigField("compilerPluginId", compilerPluginId)
+            buildConfigField("groupId", groupId)
+            buildConfigField("gradlePluginArtifactId", gradlePluginArtifactId)
+            buildConfigField("projectVersion", projectVersion)
+            buildConfigField("compilerPluginArtifactId", compilerPluginArtifactId)
+
+            buildConfigField("defaultSparkifyFqName", defaultSparkifyFqName)
+            buildConfigField("defaultColumnNameFqName", defaultColumnNameFqName)
+            buildConfigField("projectRoot", projectRoot)
+
+            buildConfigField("scalaVersion", Versions.scala)
+            buildConfigField("sparkVersion", Versions.spark)
+        }
+    }
 }
\ No newline at end of file
diff --git a/buildSrc/build.gradle.kts b/buildSrc/build.gradle.kts
index 1c7fefc4..656a8d0c 100644
--- a/buildSrc/build.gradle.kts
+++ b/buildSrc/build.gradle.kts
@@ -1,10 +1,8 @@
-import org.gradle.kotlin.dsl.`kotlin-dsl`
-import org.jetbrains.kotlin.gradle.tasks.KotlinCompile
-
 plugins {
     `kotlin-dsl`
 }
 
 repositories {
     mavenCentral()
+    maven("https://maven.pkg.jetbrains.space/kotlin/p/kotlin/bootstrap")
 }
diff --git a/buildSrc/src/main/kotlin/Dependencies.kt b/buildSrc/src/main/kotlin/Dependencies.kt
index 472a18ed..d19181c8 100644
--- a/buildSrc/src/main/kotlin/Dependencies.kt
+++ b/buildSrc/src/main/kotlin/Dependencies.kt
@@ -1,15 +1,26 @@
-object Dependencies {
+object Dependencies : Dsl<Dependencies> {
     inline val kotlinStdLib get() = "org.jetbrains.kotlin:kotlin-stdlib-jdk8:${Versions.kotlin}"
     inline val reflect get() = "org.jetbrains.kotlin:kotlin-reflect:${Versions.kotlin}"
     inline val scalaLibrary get() = "org.scala-lang:scala-library:${Versions.scala}"
     inline val kotlinxHtml get() = "org.jetbrains.kotlinx:kotlinx-html-jvm:${Versions.kotlinxHtml}"
     inline val sparkSql get() = "org.apache.spark:spark-sql_${Versions.scalaCompat}:${Versions.spark}"
+    inline val sparkSqlApi get() = "org.apache.spark:spark-sql-api_${Versions.scalaCompat}:${Versions.spark}"
+    inline val sparkConnectClient get() = "org.apache.spark:spark-connect-client-jvm_${Versions.scalaCompat}:${Versions.spark}"
     inline val sparkMl get() = "org.apache.spark:spark-mllib_${Versions.scalaCompat}:${Versions.spark}"
     inline val sparkStreaming get() = "org.apache.spark:spark-streaming_${Versions.scalaCompat}:${Versions.spark}"
     inline val hadoopClient get() = "org.apache.hadoop:hadoop-client:${Versions.hadoop}"
     inline val sparkRepl get() = "org.apache.spark:spark-repl_${Versions.scalaCompat}:${Versions.spark}"
     inline val jupyter get() = "org.jetbrains.kotlinx:kotlin-jupyter-api:${Versions.jupyter}"
-    inline val junit get() = "org.junit.jupiter:junit-jupiter-engine:5.8.1"
+    inline val junitJupiterEngine get() = "org.junit.jupiter:junit-jupiter-engine:${Versions.junitJupiterEngine}"
+    // must be platform()
+    inline val junitBom get() = "org.junit:junit-bom:${Versions.junitJupiterEngine}"
+    inline val junitJupiter get() = "org.junit.jupiter:junit-jupiter"
+    inline val junitPlatformCommons get() = "org.junit.platform:junit-platform-commons"
+    inline val junitPlatformLauncher get() = "org.junit.platform:junit-platform-launcher"
+    inline val junitPlatformRunner get() = "org.junit.platform:junit-platform-runner"
+    inline val junitPlatformSuiteApi get() = "org.junit.platform:junit-platform-suite-api"
+
+    inline val junit get() = "junit:junit:${Versions.junit}"
     inline val sparkStreamingKafka get() = "org.apache.spark:spark-streaming-kafka-0-10_${Versions.scalaCompat}:${Versions.spark}"
     inline val kotest get() = "io.kotest:kotest-runner-junit5:${Versions.kotest}"
     inline val kotestTestcontainers get() = "io.kotest.extensions:kotest-extensions-testcontainers:${Versions.kotestTestContainers}"
@@ -21,6 +32,12 @@ object Dependencies {
     inline val kotlinScriptingCommon get() = "org.jetbrains.kotlin:kotlin-scripting-common"
     inline val kotlinScriptingJvm get() = "org.jetbrains.kotlin:kotlin-scripting-jvm"
     inline val jacksonDatabind get() = "com.fasterxml.jackson.core:jackson-databind:${Versions.jacksonDatabind}"
+    inline val kotlinDateTime get() = "org.jetbrains.kotlinx:kotlinx-datetime:${Versions.kotlinxDateTime}"
+    inline val kotlinCompiler get() = "org.jetbrains.kotlin:kotlin-compiler:${Versions.kotlin}"
+    inline val kotlinScriptRuntime get() = "org.jetbrains.kotlin:kotlin-script-runtime:${Versions.kotlin}"
+    inline val kotlinAnnotationsJvm get() = "org.jetbrains.kotlin:kotlin-annotations-jvm:${Versions.kotlin}"
+    inline val kotlinCompilerInternalTestFramework get() = "org.jetbrains.kotlin:kotlin-compiler-internal-test-framework:${Versions.kotlin}"
+    inline val kotlinGradlePlugin get() = "org.jetbrains.kotlin:kotlin-gradle-plugin"
 }
 
 
diff --git a/buildSrc/src/main/kotlin/Helpers.kt b/buildSrc/src/main/kotlin/Helpers.kt
index e62a8dae..6c903b17 100644
--- a/buildSrc/src/main/kotlin/Helpers.kt
+++ b/buildSrc/src/main/kotlin/Helpers.kt
@@ -2,6 +2,11 @@ import org.gradle.api.artifacts.Dependency
 import org.gradle.api.artifacts.ProjectDependency
 import org.gradle.api.artifacts.dsl.DependencyHandler
 
+interface Dsl<T> {
+    @Suppress("UNCHECKED_CAST")
+    operator fun invoke(block: T.() -> Unit) = block(this as T)
+}
+
 fun DependencyHandler.testApi(vararg dependencyNotations: Any): List<Dependency?> =
     dependencyNotations.map {
         add("testApi", it)
@@ -18,6 +23,11 @@ fun DependencyHandler.testImplementation(vararg dependencyNotations: Any): List<
         add("testImplementation", it)
     }
 
+fun DependencyHandler.testRuntimeOnly(vararg dependencyNotations: Any): List<Dependency?> =
+    dependencyNotations.map {
+        add("testRuntimeOnly", it)
+    }
+
 fun DependencyHandler.implementation(vararg dependencyNotations: Any): List<Dependency?> =
     dependencyNotations.map {
         add("implementation", it)
@@ -28,10 +38,15 @@ fun DependencyHandler.runtimeOnly(vararg dependencyNotations: Any): List<Depende
         add("runtimeOnly", it)
     }
 
+fun DependencyHandler.compileOnly(vararg dependencyNotations: Any): List<Dependency?> =
+    dependencyNotations.map {
+        add("compileOnly", it)
+    }
+
 fun DependencyHandler.project(
     path: String,
     configuration: String? = null
 ): ProjectDependency = project(
     if (configuration != null) mapOf("path" to path, "configuration" to configuration)
     else mapOf("path" to path)
-) as ProjectDependency
+) as ProjectDependency
\ No newline at end of file
diff --git a/buildSrc/src/main/kotlin/Plugins.kt b/buildSrc/src/main/kotlin/Plugins.kt
index 354727db..59e273b0 100644
--- a/buildSrc/src/main/kotlin/Plugins.kt
+++ b/buildSrc/src/main/kotlin/Plugins.kt
@@ -2,6 +2,8 @@ import org.gradle.api.Project
 import org.gradle.kotlin.dsl.*
 import org.gradle.plugin.use.PluginDependenciesSpec
 
+inline val PluginDependenciesSpec.kotlinSparkApi
+    get() = id("org.jetbrains.kotlinx.spark.api")
 
 inline val PluginDependenciesSpec.kotlin
     get() = kotlin("jvm")
@@ -33,3 +35,11 @@ inline val Project.mavenPublishBase
 inline val PluginDependenciesSpec.jupyter
     get() = kotlin("jupyter.api") version Versions.jupyter
 
+inline val PluginDependenciesSpec.buildconfig
+    get() = id("com.github.gmazzo.buildconfig")
+
+inline val PluginDependenciesSpec.gradlePublishPlugin
+    get() = id("com.gradle.plugin-publish") version Versions.gradlePublishPlugin
+
+inline val PluginDependenciesSpec.shadow
+    get() = id("com.github.johnrengelman.shadow") version Versions.shadow
diff --git a/buildSrc/src/main/kotlin/Projects.kt b/buildSrc/src/main/kotlin/Projects.kt
index 3febd570..a8c02070 100644
--- a/buildSrc/src/main/kotlin/Projects.kt
+++ b/buildSrc/src/main/kotlin/Projects.kt
@@ -1,10 +1,8 @@
 @file:Suppress("NOTHING_TO_INLINE")
 
 import org.gradle.api.Project
-import org.gradle.api.artifacts.dsl.DependencyHandler
-import org.gradle.kotlin.dsl.support.delegates.ProjectDelegate
 
-object Projects {
+object Projects : Dsl<Projects> {
 
     inline fun Project.searchProject(name: String): Project =
         rootProject
@@ -17,8 +15,8 @@ object Projects {
     inline val Project.kotlinSparkApi
         get() = searchProject("kotlin-spark-api")
 
-    inline val Project.core
-        get() = searchProject("core")
+    inline val Project.scalaHelpers
+        get() = searchProject("scala-helpers")
 
     inline val Project.examples
         get() = searchProject("examples")
@@ -28,4 +26,10 @@ object Projects {
 
     inline val Project.scalaTuplesInKotlin
         get() = searchProject("scala-tuples-in-kotlin")
+
+    inline val Project.compilerPlugin
+        get() = searchProject("compiler-plugin")
+
+    inline val Project.gradlePlugin
+        get() = searchProject("gradle-plugin")
 }
\ No newline at end of file
diff --git a/buildSrc/src/main/kotlin/Versions.kt b/buildSrc/src/main/kotlin/Versions.kt
index a4042eb3..64b0d510 100644
--- a/buildSrc/src/main/kotlin/Versions.kt
+++ b/buildSrc/src/main/kotlin/Versions.kt
@@ -1,17 +1,27 @@
-object Versions {
-    const val project = "1.2.5-SNAPSHOT"
+object Versions : Dsl<Versions> {
+    const val project = "2.0.0-SNAPSHOT"
+    const val kotlinSparkApiGradlePlugin = "2.0.0-SNAPSHOT"
     const val groupID = "org.jetbrains.kotlinx.spark"
-    const val kotlin = "1.8.20"
+    const val kotlin = "2.0.0-RC3"
     const val jvmTarget = "8"
     const val jupyterJvmTarget = "8"
-
     inline val spark get() = System.getProperty("spark") as String
     inline val scala get() = System.getProperty("scala") as String
     inline val sparkMinor get() = spark.substringBeforeLast('.')
+
     inline val scalaCompat get() = scala.substringBeforeLast('.')
+    // TODO
+    inline val sparkConnect get() = System.getProperty("sparkConnect", "false").toBoolean()
+    const val jupyter = "0.12.0-163" // latest jupyter version with java 8
+
+    const val gradlePublishPlugin = "1.1.0"
+    const val kotest = "5.9.0"
+    const val shadow = "8.1.1"
 
-    const val jupyter = "0.12.0-32-1"
-    const val kotest = "5.5.4"
+    const val buildconfig = "5.3.5"
+
+    const val junitJupiterEngine = "5.8.1"
+    const val junit = "4.13.2"
     const val kotestTestContainers = "1.3.3"
     const val dokka = "1.8.20"
     const val jcp = "7.0.5"
@@ -23,8 +33,9 @@ object Versions {
     const val kotlinxHtml = "0.7.5"
     const val klaxon = "5.5"
     const val jacksonDatabind = "2.13.4.2"
+    const val kotlinxDateTime = "0.6.0"
 
-    inline val versionMap
+    inline val versionMap: Map<String, String>
         get() = mapOf(
             "kotlin" to kotlin,
             "scala" to scala,
@@ -32,6 +43,6 @@ object Versions {
             "spark" to spark,
             "sparkMinor" to sparkMinor,
             "version" to project,
+            "sparkConnect" to sparkConnect.toString(),
         )
-
 }
diff --git a/compiler-plugin/build.gradle.kts b/compiler-plugin/build.gradle.kts
new file mode 100644
index 00000000..02aac811
--- /dev/null
+++ b/compiler-plugin/build.gradle.kts
@@ -0,0 +1,126 @@
+import org.jetbrains.kotlin.gradle.dsl.KotlinVersion
+import org.jetbrains.kotlin.gradle.tasks.KotlinCompile
+
+plugins {
+    java
+    kotlin
+    mavenPublish
+    buildconfig
+}
+
+group = Versions.groupID
+version = Versions.project
+
+repositories {
+    mavenCentral()
+    maven("https://maven.pkg.jetbrains.space/kotlin/p/kotlin/bootstrap")
+}
+
+sourceSets {
+    test {
+        val srcDirs = listOf("src/test-gen/kotlin")
+        kotlin.srcDirs(srcDirs)
+        java.srcDirs(srcDirs)
+    }
+}
+
+dependencies {
+    Dependencies {
+        compileOnly(kotlinCompiler)
+
+        testRuntimeOnly(
+            kotlinTest,
+            kotlinScriptRuntime,
+            kotlinAnnotationsJvm,
+        )
+
+        testImplementation(
+            kotlinCompiler,
+            reflect,
+            kotlinCompilerInternalTestFramework,
+            junit,
+
+            platform(junitBom),
+            junitJupiter,
+            junitPlatformCommons,
+            junitPlatformLauncher,
+            junitPlatformRunner,
+            junitPlatformSuiteApi,
+        )
+    }
+}
+
+tasks.test {
+    useJUnitPlatform()
+    doFirst {
+        setLibraryProperty("org.jetbrains.kotlin.test.kotlin-stdlib", "kotlin-stdlib")
+        setLibraryProperty("org.jetbrains.kotlin.test.kotlin-stdlib-jdk8", "kotlin-stdlib-jdk8")
+        setLibraryProperty("org.jetbrains.kotlin.test.kotlin-reflect", "kotlin-reflect")
+        setLibraryProperty("org.jetbrains.kotlin.test.kotlin-test", "kotlin-test")
+        setLibraryProperty("org.jetbrains.kotlin.test.kotlin-script-runtime", "kotlin-script-runtime")
+        setLibraryProperty("org.jetbrains.kotlin.test.kotlin-annotations-jvm", "kotlin-annotations-jvm")
+    }
+}
+
+tasks.withType<KotlinCompile>().configureEach {
+    compilerOptions {
+        freeCompilerArgs.addAll(
+            "-opt-in=org.jetbrains.kotlin.compiler.plugin.ExperimentalCompilerApi",
+            "-Xcontext-receivers"
+        )
+        languageVersion = KotlinVersion.KOTLIN_2_0
+    }
+}
+
+kotlin {
+    jvmToolchain {
+        languageVersion = JavaLanguageVersion.of(8)
+    }
+}
+java {
+    toolchain {
+        languageVersion = JavaLanguageVersion.of(8)
+    }
+}
+
+val generateTests by tasks.creating(JavaExec::class) {
+    classpath = sourceSets.test.get().runtimeClasspath
+    mainClass.set("org.jetbrains.kotlinx.spark.api.compilerPlugin.GenerateTestsKt")
+}
+
+val compileTestKotlin by tasks.getting {
+    doLast {
+        generateTests.exec()
+    }
+}
+
+fun Test.setLibraryProperty(propName: String, jarName: String) {
+    val path = project.configurations
+        .testRuntimeClasspath.get()
+        .files
+        .find { """$jarName-\d.*jar""".toRegex().matches(it.name) }
+        ?.absolutePath
+        ?: return
+    systemProperty(propName, path)
+}
+
+/**
+ * Copies the built jar file to the gradle/bootstraps directory.
+ * This allows the project to use the compiler plugin without mavenLocal.
+ */
+val updateBootstrapVersion by tasks.creating(Copy::class) {
+    group = "build"
+    dependsOn(tasks.jar)
+
+    val jarFile = tasks.jar.get().outputs.files.files.single {
+        it.extension == "jar" && it.name.startsWith("compiler-plugin")
+    }
+    from(jarFile)
+    rename { "compiler-plugin.jar" }
+    into(project.rootDir.resolve("gradle/bootstraps"))
+    outputs.upToDateWhen { false }
+}
+
+tasks.build {
+    finalizedBy(updateBootstrapVersion)
+}
diff --git a/compiler-plugin/src/main/kotlin/org/jetbrains/kotlinx/spark/api/compilerPlugin/SparkifyCommandLineProcessor.kt b/compiler-plugin/src/main/kotlin/org/jetbrains/kotlinx/spark/api/compilerPlugin/SparkifyCommandLineProcessor.kt
new file mode 100644
index 00000000..b0e29d8f
--- /dev/null
+++ b/compiler-plugin/src/main/kotlin/org/jetbrains/kotlinx/spark/api/compilerPlugin/SparkifyCommandLineProcessor.kt
@@ -0,0 +1,72 @@
+package org.jetbrains.kotlinx.spark.api.compilerPlugin
+
+import org.jetbrains.kotlin.compiler.plugin.AbstractCliOption
+import org.jetbrains.kotlin.compiler.plugin.CliOption
+import org.jetbrains.kotlin.compiler.plugin.CommandLineProcessor
+import org.jetbrains.kotlin.config.CompilerConfiguration
+import org.jetbrains.kotlin.config.CompilerConfigurationKey
+import org.jetbrains.kotlinx.spark.api.Artifacts
+
+open class SparkifyCommandLineProcessor : CommandLineProcessor {
+
+    init {
+        println("SparkifyCommandLineProcessor loaded")
+    }
+
+    override val pluginId: String = Artifacts.compilerPluginId
+
+    override val pluginOptions: Collection<AbstractCliOption> = listOf(
+        OPTION_ENABLED,
+        OPTION_SPARKIFY_ANNOTATION_FQ_NAMES,
+        OPTION_COLUMN_NAME_ANNOTATION_FQ_NAMES,
+    )
+
+    override fun processOption(option: AbstractCliOption, value: String, configuration: CompilerConfiguration) {
+        when (val optionName = option.optionName) {
+            OPTION_ENABLED.optionName ->
+                configuration.put(KEY_ENABLED, value.toBoolean())
+
+            OPTION_SPARKIFY_ANNOTATION_FQ_NAMES.optionName ->
+                configuration.put(KEY_SPARKIFY_ANNOTATION_FQ_NAMES, value.split(",").map { it.trim() })
+
+            OPTION_COLUMN_NAME_ANNOTATION_FQ_NAMES.optionName ->
+                configuration.put(KEY_COLUMN_NAME_ANNOTATION_FQ_NAMES, value.split(",").map { it.trim() })
+
+            else -> error("Unexpected option: $optionName")
+        }
+    }
+}
+
+internal val KEY_ENABLED = CompilerConfigurationKey<Boolean>("Whether to enable Sparkify")
+
+internal val OPTION_ENABLED = CliOption(
+    optionName = "enabled",
+    valueDescription = "<true|false>",
+    description = "Whether to enable Sparkify",
+    required = false,
+    allowMultipleOccurrences = false,
+)
+
+internal val KEY_SPARKIFY_ANNOTATION_FQ_NAMES = CompilerConfigurationKey<List<String>>(
+    "Fully qualified names of annotations for Sparkify"
+)
+
+internal val OPTION_SPARKIFY_ANNOTATION_FQ_NAMES = CliOption(
+    optionName = "sparkifyAnnotationFqNames",
+    valueDescription = "<fqName1,fqName2,...>",
+    description = "Fully qualified names of annotations to sparkify",
+    required = false,
+    allowMultipleOccurrences = false,
+)
+
+internal val KEY_COLUMN_NAME_ANNOTATION_FQ_NAMES = CompilerConfigurationKey<List<String>>(
+    "Fully qualified names of annotations for ColumnName"
+)
+
+internal val OPTION_COLUMN_NAME_ANNOTATION_FQ_NAMES = CliOption(
+    optionName = "columnNameAnnotationFqNames",
+    valueDescription = "<fqName1,fqName2,...>",
+    description = "Fully qualified names of annotations for ColumnName",
+    required = false,
+    allowMultipleOccurrences = false,
+)
\ No newline at end of file
diff --git a/compiler-plugin/src/main/kotlin/org/jetbrains/kotlinx/spark/api/compilerPlugin/SparkifyCompilerPluginRegistrar.kt b/compiler-plugin/src/main/kotlin/org/jetbrains/kotlinx/spark/api/compilerPlugin/SparkifyCompilerPluginRegistrar.kt
new file mode 100644
index 00000000..f2456d50
--- /dev/null
+++ b/compiler-plugin/src/main/kotlin/org/jetbrains/kotlinx/spark/api/compilerPlugin/SparkifyCompilerPluginRegistrar.kt
@@ -0,0 +1,48 @@
+package org.jetbrains.kotlinx.spark.api.compilerPlugin
+
+import org.jetbrains.kotlin.backend.common.extensions.IrGenerationExtension
+import org.jetbrains.kotlin.compiler.plugin.CompilerPluginRegistrar
+import org.jetbrains.kotlin.config.CompilerConfiguration
+import org.jetbrains.kotlin.fir.extensions.FirExtensionRegistrar
+import org.jetbrains.kotlin.fir.extensions.FirExtensionRegistrarAdapter
+import org.jetbrains.kotlinx.spark.api.Artifacts
+import org.jetbrains.kotlinx.spark.api.compilerPlugin.ir.SparkifyIrGenerationExtension
+
+open class SparkifyCompilerPluginRegistrar : CompilerPluginRegistrar() {
+    init {
+        println("SparkifyCompilerPluginRegistrar loaded")
+    }
+
+    override val supportsK2: Boolean
+        get() = true
+
+    override fun ExtensionStorage.registerExtensions(configuration: CompilerConfiguration) {
+        if (configuration.get(KEY_ENABLED) != true) return
+
+        val sparkifyAnnotationFqNames = configuration.get(KEY_SPARKIFY_ANNOTATION_FQ_NAMES)
+            ?: listOf(Artifacts.defaultSparkifyFqName)
+
+        val columnNameAnnotationFqNames = configuration.get(KEY_COLUMN_NAME_ANNOTATION_FQ_NAMES)
+            ?: listOf(Artifacts.defaultColumnNameFqName)
+
+        val productFqNames = // TODO: get from configuration
+            listOf("scala.Product")
+
+        // Front end (FIR)
+//        FirExtensionRegistrarAdapter.registerExtension(
+//            SparkifyFirPluginRegistrar(
+//                sparkifyAnnotationFqNames = sparkifyAnnotationFqNames,
+//                productFqNames = productFqNames,
+//            )
+//        )
+
+        // Intermediate Representation IR
+        IrGenerationExtension.registerExtension(
+            SparkifyIrGenerationExtension(
+                sparkifyAnnotationFqNames = sparkifyAnnotationFqNames,
+                columnNameAnnotationFqNames = columnNameAnnotationFqNames,
+                productFqNames = productFqNames,
+            )
+        )
+    }
+}
diff --git a/compiler-plugin/src/main/kotlin/org/jetbrains/kotlinx/spark/api/compilerPlugin/SparkifyFirPluginRegistrar.kt b/compiler-plugin/src/main/kotlin/org/jetbrains/kotlinx/spark/api/compilerPlugin/SparkifyFirPluginRegistrar.kt
new file mode 100644
index 00000000..aca02af7
--- /dev/null
+++ b/compiler-plugin/src/main/kotlin/org/jetbrains/kotlinx/spark/api/compilerPlugin/SparkifyFirPluginRegistrar.kt
@@ -0,0 +1,23 @@
+package org.jetbrains.kotlinx.spark.api.compilerPlugin
+
+import org.jetbrains.kotlin.fir.extensions.FirExtensionRegistrar
+import org.jetbrains.kotlinx.spark.api.compilerPlugin.fir.DataClassSparkifyFunctionsGenerator
+import org.jetbrains.kotlinx.spark.api.compilerPlugin.fir.DataClassSparkifySuperTypeGenerator
+
+// Potential future K2 FIR hook
+// TODO
+class SparkifyFirPluginRegistrar(
+    private val sparkifyAnnotationFqNames: List<String>,
+    private val productFqNames: List<String>
+) : FirExtensionRegistrar() {
+    override fun ExtensionRegistrarContext.configurePlugin() {
+        +DataClassSparkifySuperTypeGenerator.builder(
+            sparkifyAnnotationFqNames = sparkifyAnnotationFqNames,
+            productFqNames = productFqNames,
+        )
+        +DataClassSparkifyFunctionsGenerator.builder(
+            sparkifyAnnotationFqNames = sparkifyAnnotationFqNames,
+            productFqNames = productFqNames,
+        )
+    }
+}
diff --git a/compiler-plugin/src/main/kotlin/org/jetbrains/kotlinx/spark/api/compilerPlugin/fir/DataClassSparkifyFunctionsGenerator.kt b/compiler-plugin/src/main/kotlin/org/jetbrains/kotlinx/spark/api/compilerPlugin/fir/DataClassSparkifyFunctionsGenerator.kt
new file mode 100644
index 00000000..007c6e2c
--- /dev/null
+++ b/compiler-plugin/src/main/kotlin/org/jetbrains/kotlinx/spark/api/compilerPlugin/fir/DataClassSparkifyFunctionsGenerator.kt
@@ -0,0 +1,115 @@
+package org.jetbrains.kotlinx.spark.api.compilerPlugin.fir
+
+import org.jetbrains.kotlin.GeneratedDeclarationKey
+import org.jetbrains.kotlin.fir.FirSession
+import org.jetbrains.kotlin.fir.declarations.utils.isData
+import org.jetbrains.kotlin.fir.extensions.FirDeclarationGenerationExtension
+import org.jetbrains.kotlin.fir.extensions.MemberGenerationContext
+import org.jetbrains.kotlin.fir.plugin.createMemberFunction
+import org.jetbrains.kotlin.fir.render
+import org.jetbrains.kotlin.fir.resolve.getSuperTypes
+import org.jetbrains.kotlin.fir.symbols.impl.FirClassSymbol
+import org.jetbrains.kotlin.fir.symbols.impl.FirNamedFunctionSymbol
+import org.jetbrains.kotlin.fir.types.toClassSymbol
+import org.jetbrains.kotlin.name.CallableId
+import org.jetbrains.kotlin.name.Name
+
+class DataClassSparkifyFunctionsGenerator(
+    session: FirSession,
+    private val sparkifyAnnotationFqNames: List<String>,
+    private val productFqNames: List<String>,
+) : FirDeclarationGenerationExtension(session) {
+
+    companion object {
+        fun builder(
+            sparkifyAnnotationFqNames: List<String>,
+            productFqNames: List<String>
+        ): (FirSession) -> FirDeclarationGenerationExtension = {
+            DataClassSparkifyFunctionsGenerator(
+                session = it,
+                sparkifyAnnotationFqNames = sparkifyAnnotationFqNames,
+                productFqNames = productFqNames,
+            )
+        }
+
+        // functions to generate
+        val canEqual = Name.identifier("canEqual")
+        val productElement = Name.identifier("productElement")
+        val productArity = Name.identifier("productArity")
+    }
+
+    override fun generateFunctions(
+        callableId: CallableId,
+        context: MemberGenerationContext?
+    ): List<FirNamedFunctionSymbol> {
+        val owner = context?.owner ?: return emptyList()
+
+        val functionName = callableId.callableName
+        val superTypes = owner.getSuperTypes(session)
+        val superProduct = superTypes.first {
+            it.toString().endsWith("Product")
+        }.toClassSymbol(session)!!
+        val superEquals = superTypes.first {
+            it.toString().endsWith("Equals")
+        }.toClassSymbol(session)!!
+
+        val function = when (functionName) {
+            canEqual -> {
+                val func = createMemberFunction(
+                    owner = owner,
+                    key = Key,
+                    name = functionName,
+                    returnType = session.builtinTypes.booleanType.type,
+                ) {
+                    valueParameter(
+                        name = Name.identifier("that"),
+                        type = session.builtinTypes.nullableAnyType.type,
+                    )
+                }
+//                val superFunction = superEquals.declarationSymbols.first {
+//                    it is FirNamedFunctionSymbol && it.name == functionName
+//                } as FirNamedFunctionSymbol
+//                overrides(func, superFunction)
+                func
+            }
+
+            productElement -> {
+                createMemberFunction(
+                    owner = owner,
+                    key = Key,
+                    name = functionName,
+                    returnType = session.builtinTypes.nullableAnyType.type,
+                ) {
+                    valueParameter(
+                        name = Name.identifier("n"),
+                        type = session.builtinTypes.intType.type,
+                    )
+                }
+            }
+
+            productArity -> {
+                createMemberFunction(
+                    owner = owner,
+                    key = Key,
+                    name = functionName,
+                    returnType = session.builtinTypes.intType.type,
+                )
+            }
+
+            else -> {
+                return emptyList()
+            }
+        }
+
+        return listOf(function.symbol)
+    }
+
+    override fun getCallableNamesForClass(classSymbol: FirClassSymbol<*>, context: MemberGenerationContext): Set<Name> =
+        if (classSymbol.isData && classSymbol.annotations.any { "Sparkify" in it.render() }) {
+            setOf(canEqual, productElement, productArity)
+        } else {
+            emptySet()
+        }
+
+    object Key : GeneratedDeclarationKey()
+}
\ No newline at end of file
diff --git a/compiler-plugin/src/main/kotlin/org/jetbrains/kotlinx/spark/api/compilerPlugin/fir/DataClassSparkifySuperTypeGenerator.kt b/compiler-plugin/src/main/kotlin/org/jetbrains/kotlinx/spark/api/compilerPlugin/fir/DataClassSparkifySuperTypeGenerator.kt
new file mode 100644
index 00000000..3a13c458
--- /dev/null
+++ b/compiler-plugin/src/main/kotlin/org/jetbrains/kotlinx/spark/api/compilerPlugin/fir/DataClassSparkifySuperTypeGenerator.kt
@@ -0,0 +1,58 @@
+package org.jetbrains.kotlinx.spark.api.compilerPlugin.fir
+
+import org.jetbrains.kotlin.fir.FirSession
+import org.jetbrains.kotlin.fir.declarations.FirClassLikeDeclaration
+import org.jetbrains.kotlin.fir.declarations.utils.isData
+import org.jetbrains.kotlin.fir.extensions.FirSupertypeGenerationExtension
+import org.jetbrains.kotlin.fir.render
+import org.jetbrains.kotlin.fir.resolve.fqName
+import org.jetbrains.kotlin.fir.symbols.impl.ConeClassLikeLookupTagImpl
+import org.jetbrains.kotlin.fir.types.FirResolvedTypeRef
+import org.jetbrains.kotlin.fir.types.builder.buildResolvedTypeRef
+import org.jetbrains.kotlin.fir.types.impl.ConeClassLikeTypeImpl
+import org.jetbrains.kotlin.name.ClassId
+import org.jetbrains.kotlin.name.FqName
+
+/**
+ * This class tells the FIR that all @Sparkify annotated data classes
+ * get [scala.Product] as their super type.
+ */
+class DataClassSparkifySuperTypeGenerator(
+    session: FirSession,
+    private val sparkifyAnnotationFqNames: List<String>,
+    private val productFqNames: List<String>,
+) : FirSupertypeGenerationExtension(session) {
+
+    companion object {
+        fun builder(sparkifyAnnotationFqNames: List<String>, productFqNames: List<String>): (FirSession) -> FirSupertypeGenerationExtension = {
+            DataClassSparkifySuperTypeGenerator(
+                session = it,
+                sparkifyAnnotationFqNames = sparkifyAnnotationFqNames,
+                productFqNames = productFqNames,
+            )
+        }
+    }
+
+    override fun computeAdditionalSupertypes(
+        classLikeDeclaration: FirClassLikeDeclaration,
+        resolvedSupertypes: List<FirResolvedTypeRef>,
+        typeResolver: TypeResolveService,
+    ): List<FirResolvedTypeRef> = listOf(
+        buildResolvedTypeRef {
+            val scalaProduct = productFqNames.first().let {
+                ClassId.topLevel(FqName(it))
+            }
+            type = ConeClassLikeTypeImpl(
+                lookupTag = ConeClassLikeLookupTagImpl(scalaProduct),
+                typeArguments = emptyArray(),
+                isNullable = false,
+            )
+        }
+    )
+
+    override fun needTransformSupertypes(declaration: FirClassLikeDeclaration): Boolean =
+        declaration.symbol.isData &&
+                declaration.annotations.any {
+                    "Sparkify" in it.render()
+                }
+}
\ No newline at end of file
diff --git a/compiler-plugin/src/main/kotlin/org/jetbrains/kotlinx/spark/api/compilerPlugin/ir/DataClassSparkifyGenerator.kt b/compiler-plugin/src/main/kotlin/org/jetbrains/kotlinx/spark/api/compilerPlugin/ir/DataClassSparkifyGenerator.kt
new file mode 100644
index 00000000..c27050d0
--- /dev/null
+++ b/compiler-plugin/src/main/kotlin/org/jetbrains/kotlinx/spark/api/compilerPlugin/ir/DataClassSparkifyGenerator.kt
@@ -0,0 +1,377 @@
+package org.jetbrains.kotlinx.spark.api.compilerPlugin.ir
+
+import org.jetbrains.kotlin.backend.common.extensions.IrPluginContext
+import org.jetbrains.kotlin.backend.common.ir.addDispatchReceiver
+import org.jetbrains.kotlin.backend.common.lower.createIrBuilder
+import org.jetbrains.kotlin.backend.common.lower.irThrow
+import org.jetbrains.kotlin.descriptors.Modality
+import org.jetbrains.kotlin.ir.IrElement
+import org.jetbrains.kotlin.ir.UNDEFINED_OFFSET
+import org.jetbrains.kotlin.ir.backend.js.utils.valueArguments
+import org.jetbrains.kotlin.ir.builders.declarations.addFunction
+import org.jetbrains.kotlin.ir.builders.declarations.addValueParameter
+import org.jetbrains.kotlin.ir.builders.irBlockBody
+import org.jetbrains.kotlin.ir.builders.irBranch
+import org.jetbrains.kotlin.ir.builders.irCall
+import org.jetbrains.kotlin.ir.builders.irElseBranch
+import org.jetbrains.kotlin.ir.builders.irEquals
+import org.jetbrains.kotlin.ir.builders.irGet
+import org.jetbrains.kotlin.ir.builders.irIs
+import org.jetbrains.kotlin.ir.builders.irReturn
+import org.jetbrains.kotlin.ir.builders.irWhen
+import org.jetbrains.kotlin.ir.declarations.IrClass
+import org.jetbrains.kotlin.ir.declarations.IrProperty
+import org.jetbrains.kotlin.ir.expressions.IrConst
+import org.jetbrains.kotlin.ir.expressions.IrStatementOrigin
+import org.jetbrains.kotlin.ir.expressions.impl.IrConstImpl
+import org.jetbrains.kotlin.ir.expressions.impl.IrConstructorCallImpl
+import org.jetbrains.kotlin.ir.symbols.UnsafeDuringIrConstructionAPI
+import org.jetbrains.kotlin.ir.types.classFqName
+import org.jetbrains.kotlin.ir.types.classOrNull
+import org.jetbrains.kotlin.ir.types.defaultType
+import org.jetbrains.kotlin.ir.types.superTypes
+import org.jetbrains.kotlin.ir.util.constructors
+import org.jetbrains.kotlin.ir.util.defaultType
+import org.jetbrains.kotlin.ir.util.functions
+import org.jetbrains.kotlin.ir.util.hasAnnotation
+import org.jetbrains.kotlin.ir.util.isAnnotationWithEqualFqName
+import org.jetbrains.kotlin.ir.util.parentAsClass
+import org.jetbrains.kotlin.ir.util.primaryConstructor
+import org.jetbrains.kotlin.ir.util.properties
+import org.jetbrains.kotlin.ir.util.toIrConst
+import org.jetbrains.kotlin.ir.visitors.IrElementVisitorVoid
+import org.jetbrains.kotlin.ir.visitors.acceptChildrenVoid
+import org.jetbrains.kotlin.name.ClassId
+import org.jetbrains.kotlin.name.FqName
+import org.jetbrains.kotlin.name.Name
+import org.jetbrains.kotlin.name.SpecialNames
+
+class DataClassSparkifyGenerator(
+    private val pluginContext: IrPluginContext,
+    private val sparkifyAnnotationFqNames: List<String>,
+    private val columnNameAnnotationFqNames: List<String>,
+    private val productFqNames: List<String>,
+) : IrElementVisitorVoid {
+
+    init {
+        require(sparkifyAnnotationFqNames.isNotEmpty()) {
+            "At least one sparkify annotation must be provided"
+        }
+        require(columnNameAnnotationFqNames.isNotEmpty()) {
+            "At least one column name annotation must be provided"
+        }
+    }
+
+    override fun visitElement(element: IrElement) {
+        when (element) {
+//            is IrDeclaration,
+//            is IrFile,
+//            is IrBlockBody,
+//            is IrModuleFragment -> element.acceptChildrenVoid(this)
+
+            // test for now
+            else -> element.acceptChildrenVoid(this)
+        }
+    }
+
+    /**
+     * Converts
+     * ```kt
+     * @Sparkify
+     * data class User(
+     *     val name: String = "John Doe",
+     *     @get:JvmName("ignored") val age: Int = 25,
+     *     @ColumnName("a") val test: Double = 1.0,
+     *     @get:ColumnName("b") val test2: Double = 2.0,
+     * )
+     * ```
+     * to
+     * ```kt
+     * @Sparkify
+     * data class User(
+     *     @get:JvmName("name") val name: String = "John Doe",
+     *     @get:JvmName("age") val age: Int = 25,
+     *     @get:JvmName("a") @ColumnName("a") val test: Double = 1.0,
+     *     @get:JvmName("b") @get:ColumnName("b") val test2: Double = 2.0,
+     * )
+     * ```
+     */
+    @OptIn(UnsafeDuringIrConstructionAPI::class)
+    override fun visitProperty(declaration: IrProperty) {
+        val origin = declaration.parent as? IrClass ?: return super.visitProperty(declaration)
+        if (sparkifyAnnotationFqNames.none { origin.hasAnnotation(FqName(it)) })
+            return super.visitProperty(declaration)
+
+        if (!origin.isData) return super.visitProperty(declaration)
+
+        // must be in primary constructor
+        val constructorParams = declaration.parentAsClass.primaryConstructor?.valueParameters
+            ?: return super.visitProperty(declaration)
+
+        if (declaration.name !in constructorParams.map { it.name })
+            return super.visitProperty(declaration)
+
+        val getter = declaration.getter ?: return super.visitProperty(declaration)
+
+        // Let's find if there's a ColumnName annotation
+        val columnNameAnnotationFqNames = columnNameAnnotationFqNames.map { FqName(it) }
+
+        val allAnnotations = declaration.annotations +
+                getter.annotations +
+                constructorParams.first { it.name == declaration.name }.annotations
+        val columnNameAnnotation = allAnnotations
+            .firstOrNull { annotation ->
+                columnNameAnnotationFqNames.any {
+                    annotation.isAnnotationWithEqualFqName(it) &&
+                            annotation.valueArguments.count {
+                                it?.type == pluginContext.irBuiltIns.stringType
+                            } >= 1
+                }
+            }
+
+        // if there is, get the ColumnName value, else use the property name as newName
+        val columnName = columnNameAnnotation
+            ?.valueArguments
+            ?.firstOrNull { it?.type == pluginContext.irBuiltIns.stringType }
+            ?.let { it as? IrConst<*> }
+            ?.value as? String
+        val newName = columnName ?: declaration.name.identifier
+
+        val jvmNameFqName = FqName(JvmName::class.qualifiedName!!)
+
+        // remove previous JvmNames
+        getter.annotations = getter.annotations
+            .filterNot { it.isAnnotationWithEqualFqName(jvmNameFqName) }
+
+        // create a new JvmName annotation with newName
+        val jvmNameClassId = jvmNameFqName.toClassId()
+        val jvmName = pluginContext.referenceClass(jvmNameClassId)!!
+        val jvmNameConstructor = jvmName
+            .constructors
+            .firstOrNull()!!
+
+        val jvmNameAnnotationCall = IrConstructorCallImpl.fromSymbolOwner(
+            type = jvmName.defaultType,
+            constructorSymbol = jvmNameConstructor,
+        )
+        jvmNameAnnotationCall.putValueArgument(
+            index = 0,
+            valueArgument = IrConstImpl.string(
+                startOffset = UNDEFINED_OFFSET,
+                endOffset = UNDEFINED_OFFSET,
+                type = pluginContext.irBuiltIns.stringType,
+                value = newName,
+            )
+        )
+        getter.annotations += jvmNameAnnotationCall
+        println("Added @get:JvmName(\"$newName\") annotation to property ${origin.name}.${declaration.name}")
+    }
+
+    private fun FqName.toClassId(): ClassId = ClassId(packageFqName = parent(), topLevelName = shortName())
+
+    /**
+     * Converts
+     * ```kt
+     * @Sparkify
+     * data class User(
+     *    val name: String = "John Doe",
+     *    val age: Int = 25,
+     *    @ColumnName("a") val test: Double = 1.0,
+     *    @get:ColumnName("b") val test2: Double = 2.0,
+     * )
+     * ```
+     * to
+     * ```kt
+     * @Sparkify
+     * data class User(
+     *    val name: String = "John Doe",
+     *    val age: Int = 25,
+     *    @ColumnName("a") val test: Double = 1.0,
+     *    @get:ColumnName("b") val test2: Double = 2.0,
+     * ): scala.Product {
+     *   override fun canEqual(that: Any?): Boolean = that is User
+     *   override fun productElement(n: Int): Any = when (n) {
+     *      0 -> name
+     *      1 -> age
+     *      2 -> test
+     *      else -> throw IndexOutOfBoundsException(n.toString())
+     *    }
+     *    override fun productArity(): Int = 4
+     * }
+     * ```
+     */
+    @OptIn(UnsafeDuringIrConstructionAPI::class)
+    override fun visitClass(declaration: IrClass) {
+        if (sparkifyAnnotationFqNames.none { declaration.hasAnnotation(FqName(it)) })
+            return super.visitClass(declaration)
+
+        if (!declaration.isData) return super.visitClass(declaration)
+
+        // add superclasses
+        val scalaProductClass = productFqNames.firstNotNullOfOrNull {
+            val classId = ClassId.topLevel(FqName(it))
+            pluginContext.referenceClass(classId)
+        }!!
+
+        declaration.superTypes += scalaProductClass.defaultType
+
+        val serializableClass = pluginContext.referenceClass(
+            ClassId.topLevel(FqName("java.io.Serializable"))
+        )!!
+
+        declaration.superTypes += serializableClass.defaultType
+
+        // finding the constructor params
+        val constructorParams = declaration.primaryConstructor?.valueParameters
+            ?: return super.visitClass(declaration)
+
+        // finding properties
+        val props = declaration.properties
+
+        // getting the properties that are in the constructor
+        val properties = constructorParams.mapNotNull { param ->
+            props.firstOrNull { it.name == param.name }
+        }
+
+        // finding supertype Equals
+        val superEqualsInterface = scalaProductClass.superTypes()
+            .first { it.classFqName?.shortName()?.asString()?.contains("Equals") == true }
+            .classOrNull ?: return super.visitClass(declaration)
+
+        // add canEqual
+        val superCanEqualFunction = superEqualsInterface.functions.first {
+            it.owner.name.asString() == "canEqual" &&
+                    it.owner.valueParameters.size == 1 &&
+                    it.owner.valueParameters.first().type == pluginContext.irBuiltIns.anyNType
+        }
+
+        val canEqualFunction = declaration.addFunction(
+            name = "canEqual",
+            returnType = pluginContext.irBuiltIns.booleanType,
+            modality = Modality.OPEN,
+        )
+        with(canEqualFunction) {
+            overriddenSymbols = listOf(superCanEqualFunction)
+            parent = declaration
+
+            // add implicit $this parameter
+            addDispatchReceiver {
+                name = SpecialNames.THIS
+                type = declaration.defaultType
+            }
+
+            // add that parameter
+            val that = addValueParameter(
+                name = Name.identifier("that"),
+                type = pluginContext.irBuiltIns.anyNType,
+            )
+
+            // add body
+            body = pluginContext.irBuiltIns.createIrBuilder(symbol).irBlockBody {
+                val call = irIs(argument = irGet(that), type = declaration.defaultType)
+                +irReturn(call)
+            }
+        }
+
+        // add productArity
+        val superProductArityFunction = scalaProductClass.functions.first {
+            it.owner.name.asString() == "productArity" &&
+                    it.owner.valueParameters.isEmpty()
+        }
+
+        val productArityFunction = declaration.addFunction(
+            name = "productArity",
+            returnType = pluginContext.irBuiltIns.intType,
+            modality = Modality.OPEN,
+        )
+        with(productArityFunction) {
+            overriddenSymbols = listOf(superProductArityFunction)
+            parent = declaration
+
+            // add implicit $this parameter
+            addDispatchReceiver {
+                name = SpecialNames.THIS
+                type = declaration.defaultType
+            }
+
+            // add body
+            body = pluginContext.irBuiltIns.createIrBuilder(symbol).irBlockBody {
+                val const = properties.size.toIrConst(pluginContext.irBuiltIns.intType)
+                +irReturn(const)
+            }
+        }
+
+        // add productElement
+        val superProductElementFunction = scalaProductClass.functions.first {
+            it.owner.name.asString() == "productElement" &&
+                    it.owner.valueParameters.size == 1 &&
+                    it.owner.valueParameters.first().type == pluginContext.irBuiltIns.intType
+        }
+
+        val productElementFunction = declaration.addFunction(
+            name = "productElement",
+            returnType = pluginContext.irBuiltIns.anyNType,
+            modality = Modality.OPEN,
+        )
+        with(productElementFunction) {
+            overriddenSymbols = listOf(superProductElementFunction)
+            parent = declaration
+
+            // add implicit $this parameter
+            val `this` = addDispatchReceiver {
+                name = SpecialNames.THIS
+                type = declaration.defaultType
+            }
+
+            // add n parameter
+            val n = addValueParameter(
+                name = Name.identifier("n"),
+                type = pluginContext.irBuiltIns.intType,
+            )
+
+            // add body
+            body = pluginContext.irBuiltIns.createIrBuilder(symbol).irBlockBody {
+                val whenBranches = buildList {
+                    for ((i, prop) in properties.withIndex()) {
+                        val condition = irEquals(
+                            arg1 = irGet(n),
+                            arg2 = i.toIrConst(pluginContext.irBuiltIns.intType),
+                        )
+                        val call = irCall(prop.getter!!)
+                        with(call) {
+                            origin = IrStatementOrigin.GET_PROPERTY
+                            dispatchReceiver = irGet(`this`)
+                        }
+
+                        val branch = irBranch(
+                            condition = condition,
+                            result = call
+                        )
+                        add(branch)
+                    }
+
+                    val ioobClass = pluginContext.referenceClass(
+                        ClassId(FqName("java.lang"), Name.identifier("IndexOutOfBoundsException"))
+                    )!!
+                    val ioobConstructor = ioobClass.constructors.first { it.owner.valueParameters.isEmpty() }
+                    val throwCall = irThrow(
+                        IrConstructorCallImpl.fromSymbolOwner(
+                            ioobClass.defaultType,
+                            ioobConstructor
+                        )
+                    )
+                    val elseBranch = irElseBranch(throwCall)
+                    add(elseBranch)
+                }
+                val whenBlock = irWhen(pluginContext.irBuiltIns.anyNType, whenBranches)
+                with(whenBlock) {
+                    origin = IrStatementOrigin.IF
+                }
+                +irReturn(whenBlock)
+            }
+        }
+
+        // pass down to the properties
+        declaration.acceptChildrenVoid(this)
+    }
+}
\ No newline at end of file
diff --git a/compiler-plugin/src/main/kotlin/org/jetbrains/kotlinx/spark/api/compilerPlugin/ir/SparkifyIrGenerationExtension.kt b/compiler-plugin/src/main/kotlin/org/jetbrains/kotlinx/spark/api/compilerPlugin/ir/SparkifyIrGenerationExtension.kt
new file mode 100644
index 00000000..d17da71f
--- /dev/null
+++ b/compiler-plugin/src/main/kotlin/org/jetbrains/kotlinx/spark/api/compilerPlugin/ir/SparkifyIrGenerationExtension.kt
@@ -0,0 +1,26 @@
+package org.jetbrains.kotlinx.spark.api.compilerPlugin.ir
+
+import org.jetbrains.kotlin.backend.common.extensions.IrGenerationExtension
+import org.jetbrains.kotlin.backend.common.extensions.IrPluginContext
+import org.jetbrains.kotlin.ir.declarations.IrModuleFragment
+import org.jetbrains.kotlin.ir.visitors.acceptChildrenVoid
+
+class SparkifyIrGenerationExtension(
+    private val sparkifyAnnotationFqNames: List<String>,
+    private val columnNameAnnotationFqNames: List<String>,
+    private val productFqNames: List<String>,
+) : IrGenerationExtension {
+    override fun generate(moduleFragment: IrModuleFragment, pluginContext: IrPluginContext) {
+        val visitors = listOf(
+            DataClassSparkifyGenerator(
+                pluginContext = pluginContext,
+                sparkifyAnnotationFqNames = sparkifyAnnotationFqNames,
+                columnNameAnnotationFqNames = columnNameAnnotationFqNames,
+                productFqNames = productFqNames,
+            ),
+        )
+        for (visitor in visitors) {
+            moduleFragment.acceptChildrenVoid(visitor)
+        }
+    }
+}
diff --git a/compiler-plugin/src/main/resources/META-INF/services/org.jetbrains.kotlin.compiler.plugin.CommandLineProcessor b/compiler-plugin/src/main/resources/META-INF/services/org.jetbrains.kotlin.compiler.plugin.CommandLineProcessor
new file mode 100644
index 00000000..9eff122b
--- /dev/null
+++ b/compiler-plugin/src/main/resources/META-INF/services/org.jetbrains.kotlin.compiler.plugin.CommandLineProcessor
@@ -0,0 +1 @@
+org.jetbrains.kotlinx.spark.api.compilerPlugin.SparkifyCommandLineProcessor
diff --git a/compiler-plugin/src/main/resources/META-INF/services/org.jetbrains.kotlin.compiler.plugin.CompilerPluginRegistrar b/compiler-plugin/src/main/resources/META-INF/services/org.jetbrains.kotlin.compiler.plugin.CompilerPluginRegistrar
new file mode 100644
index 00000000..0568356a
--- /dev/null
+++ b/compiler-plugin/src/main/resources/META-INF/services/org.jetbrains.kotlin.compiler.plugin.CompilerPluginRegistrar
@@ -0,0 +1 @@
+org.jetbrains.kotlinx.spark.api.compilerPlugin.SparkifyCompilerPluginRegistrar
diff --git a/compiler-plugin/src/test-gen/kotlin/org/jetbrains/kotlinx/spark/api/compilerPlugin/runners/BoxTestGenerated.java b/compiler-plugin/src/test-gen/kotlin/org/jetbrains/kotlinx/spark/api/compilerPlugin/runners/BoxTestGenerated.java
new file mode 100644
index 00000000..1cb5a221
--- /dev/null
+++ b/compiler-plugin/src/test-gen/kotlin/org/jetbrains/kotlinx/spark/api/compilerPlugin/runners/BoxTestGenerated.java
@@ -0,0 +1,41 @@
+
+
+package org.jetbrains.kotlinx.spark.api.compilerPlugin.runners;
+
+import com.intellij.testFramework.TestDataPath;
+import org.jetbrains.kotlin.test.util.KtTestUtil;
+import org.jetbrains.kotlin.test.TargetBackend;
+import org.jetbrains.kotlin.test.TestMetadata;
+import org.junit.jupiter.api.Test;
+
+import java.io.File;
+import java.util.regex.Pattern;
+
+/** This class is generated by {@link org.jetbrains.kotlinx.spark.api.compilerPlugin.GenerateTestsKt}. DO NOT MODIFY MANUALLY */
+@SuppressWarnings("all")
+@TestMetadata("/mnt/data/Projects/kotlin-spark-api/compiler-plugin/src/test/resources/testData/box")
+@TestDataPath("$PROJECT_ROOT")
+public class BoxTestGenerated extends AbstractBoxTest {
+  @Test
+  public void testAllFilesPresentInBox() {
+    KtTestUtil.assertAllTestsPresentByMetadataWithExcluded(this.getClass(), new File("/mnt/data/Projects/kotlin-spark-api/compiler-plugin/src/test/resources/testData/box"), Pattern.compile("^(.+)\\.kt$"), null, TargetBackend.JVM_IR, true);
+  }
+
+  @Test
+  @TestMetadata("dataClassInFunctionTest.kt")
+  public void testDataClassInFunctionTest() {
+    runTest("/mnt/data/Projects/kotlin-spark-api/compiler-plugin/src/test/resources/testData/box/dataClassInFunctionTest.kt");
+  }
+
+  @Test
+  @TestMetadata("dataClassIsProductTest.kt")
+  public void testDataClassIsProductTest() {
+    runTest("/mnt/data/Projects/kotlin-spark-api/compiler-plugin/src/test/resources/testData/box/dataClassIsProductTest.kt");
+  }
+
+  @Test
+  @TestMetadata("dataClassTest.kt")
+  public void testDataClassTest() {
+    runTest("/mnt/data/Projects/kotlin-spark-api/compiler-plugin/src/test/resources/testData/box/dataClassTest.kt");
+  }
+}
diff --git a/compiler-plugin/src/test/kotlin/org/jetbrains/kotlinx/spark/api/compilerPlugin/GenerateTests.kt b/compiler-plugin/src/test/kotlin/org/jetbrains/kotlinx/spark/api/compilerPlugin/GenerateTests.kt
new file mode 100644
index 00000000..fb0fde9a
--- /dev/null
+++ b/compiler-plugin/src/test/kotlin/org/jetbrains/kotlinx/spark/api/compilerPlugin/GenerateTests.kt
@@ -0,0 +1,23 @@
+package org.jetbrains.kotlinx.spark.api.compilerPlugin
+
+import org.jetbrains.kotlin.generators.generateTestGroupSuiteWithJUnit5
+import org.jetbrains.kotlinx.spark.api.Artifacts
+import org.jetbrains.kotlinx.spark.api.compilerPlugin.runners.AbstractBoxTest
+import org.jetbrains.kotlinx.spark.api.compilerPlugin.runners.AbstractDiagnosticTest
+
+fun main() {
+    generateTestGroupSuiteWithJUnit5 {
+        testGroup(
+            testDataRoot = "${Artifacts.projectRoot}/${Artifacts.compilerPluginArtifactId}/src/test/resources/testData",
+            testsRoot = "${Artifacts.projectRoot}/${Artifacts.compilerPluginArtifactId}/src/test-gen/kotlin",
+        ) {
+//            testClass<AbstractDiagnosticTest> {
+//                model("diagnostics")
+//            }
+
+            testClass<AbstractBoxTest> {
+                model("box")
+            }
+        }
+    }
+}
diff --git a/compiler-plugin/src/test/kotlin/org/jetbrains/kotlinx/spark/api/compilerPlugin/runners/AbstractBoxTest.kt b/compiler-plugin/src/test/kotlin/org/jetbrains/kotlinx/spark/api/compilerPlugin/runners/AbstractBoxTest.kt
new file mode 100644
index 00000000..26030a4a
--- /dev/null
+++ b/compiler-plugin/src/test/kotlin/org/jetbrains/kotlinx/spark/api/compilerPlugin/runners/AbstractBoxTest.kt
@@ -0,0 +1,61 @@
+package org.jetbrains.kotlinx.spark.api.compilerPlugin.runners
+
+import org.jetbrains.kotlin.platform.jvm.JvmPlatforms
+import org.jetbrains.kotlin.test.FirParser
+import org.jetbrains.kotlin.test.TargetBackend
+import org.jetbrains.kotlin.test.backend.BlackBoxCodegenSuppressor
+import org.jetbrains.kotlin.test.backend.handlers.IrTextDumpHandler
+import org.jetbrains.kotlin.test.backend.handlers.IrTreeVerifierHandler
+import org.jetbrains.kotlin.test.backend.handlers.JvmBoxRunner
+import org.jetbrains.kotlin.test.backend.ir.JvmIrBackendFacade
+import org.jetbrains.kotlin.test.builders.TestConfigurationBuilder
+import org.jetbrains.kotlin.test.builders.fir2IrStep
+import org.jetbrains.kotlin.test.builders.irHandlersStep
+import org.jetbrains.kotlin.test.builders.jvmArtifactsHandlersStep
+import org.jetbrains.kotlin.test.directives.CodegenTestDirectives.DUMP_IR
+import org.jetbrains.kotlin.test.directives.configureFirParser
+import org.jetbrains.kotlin.test.model.DependencyKind
+import org.jetbrains.kotlin.test.runners.RunnerWithTargetBackendForTestGeneratorMarker
+
+/*
+ * Containers of different directives, which can be used in tests:
+ * - ModuleStructureDirectives
+ * - LanguageSettingsDirectives
+ * - DiagnosticsDirectives
+ * - CodegenTestDirectives
+ *
+ * All of them are located in `org.jetbrains.kotlin.test.directives` package
+ */
+open class AbstractBoxTest : BaseTestRunner(), RunnerWithTargetBackendForTestGeneratorMarker {
+    override val targetBackend: TargetBackend
+        get() = TargetBackend.JVM_IR
+
+    override fun TestConfigurationBuilder.configuration() {
+        globalDefaults {
+            targetBackend = TargetBackend.JVM_IR
+            targetPlatform = JvmPlatforms.defaultJvmPlatform
+            dependencyKind = DependencyKind.Binary
+        }
+
+        configureFirParser(FirParser.Psi)
+
+        defaultDirectives {
+            +DUMP_IR
+        }
+
+        commonFirWithPluginFrontendConfiguration()
+        fir2IrStep()
+        irHandlersStep {
+            useHandlers(
+                ::IrTextDumpHandler,
+                ::IrTreeVerifierHandler,
+            )
+        }
+        facadeStep(::JvmIrBackendFacade)
+        jvmArtifactsHandlersStep {
+            useHandlers(::JvmBoxRunner)
+        }
+
+        useAfterAnalysisCheckers(::BlackBoxCodegenSuppressor)
+    }
+}
diff --git a/compiler-plugin/src/test/kotlin/org/jetbrains/kotlinx/spark/api/compilerPlugin/runners/AbstractDiagnosticTest.kt b/compiler-plugin/src/test/kotlin/org/jetbrains/kotlinx/spark/api/compilerPlugin/runners/AbstractDiagnosticTest.kt
new file mode 100644
index 00000000..9c2f362f
--- /dev/null
+++ b/compiler-plugin/src/test/kotlin/org/jetbrains/kotlinx/spark/api/compilerPlugin/runners/AbstractDiagnosticTest.kt
@@ -0,0 +1,18 @@
+package org.jetbrains.kotlinx.spark.api.compilerPlugin.runners
+
+import org.jetbrains.kotlin.test.FirParser
+import org.jetbrains.kotlin.test.builders.TestConfigurationBuilder
+import org.jetbrains.kotlin.test.directives.configureFirParser
+import org.jetbrains.kotlin.test.services.EnvironmentBasedStandardLibrariesPathProvider
+import org.jetbrains.kotlin.test.services.KotlinStandardLibrariesPathProvider
+
+abstract class AbstractDiagnosticTest : BaseTestRunner() {
+    override fun TestConfigurationBuilder.configuration() {
+        commonFirWithPluginFrontendConfiguration()
+        configureFirParser(FirParser.Psi)
+    }
+
+    override fun createKotlinStandardLibrariesPathProvider(): KotlinStandardLibrariesPathProvider {
+        return EnvironmentBasedStandardLibrariesPathProvider
+    }
+}
diff --git a/compiler-plugin/src/test/kotlin/org/jetbrains/kotlinx/spark/api/compilerPlugin/runners/BaseTestRunner.kt b/compiler-plugin/src/test/kotlin/org/jetbrains/kotlinx/spark/api/compilerPlugin/runners/BaseTestRunner.kt
new file mode 100644
index 00000000..c60b54ed
--- /dev/null
+++ b/compiler-plugin/src/test/kotlin/org/jetbrains/kotlinx/spark/api/compilerPlugin/runners/BaseTestRunner.kt
@@ -0,0 +1,40 @@
+package org.jetbrains.kotlinx.spark.api.compilerPlugin.runners
+
+import org.jetbrains.kotlin.test.builders.TestConfigurationBuilder
+import org.jetbrains.kotlin.test.directives.FirDiagnosticsDirectives
+import org.jetbrains.kotlin.test.directives.JvmEnvironmentConfigurationDirectives
+import org.jetbrains.kotlin.test.initIdeaConfiguration
+import org.jetbrains.kotlin.test.runners.AbstractKotlinCompilerTest
+import org.jetbrains.kotlin.test.runners.baseFirDiagnosticTestConfiguration
+import org.jetbrains.kotlin.test.services.EnvironmentBasedStandardLibrariesPathProvider
+import org.jetbrains.kotlin.test.services.KotlinStandardLibrariesPathProvider
+import org.jetbrains.kotlinx.spark.api.compilerPlugin.services.ExtensionRegistrarConfigurator
+import org.junit.jupiter.api.BeforeAll
+
+abstract class BaseTestRunner : AbstractKotlinCompilerTest() {
+    companion object {
+        @BeforeAll
+        @JvmStatic
+        fun setUp() {
+            initIdeaConfiguration()
+        }
+    }
+
+    override fun createKotlinStandardLibrariesPathProvider(): KotlinStandardLibrariesPathProvider {
+        return EnvironmentBasedStandardLibrariesPathProvider
+    }
+}
+
+fun TestConfigurationBuilder.commonFirWithPluginFrontendConfiguration() {
+    baseFirDiagnosticTestConfiguration()
+
+    defaultDirectives {
+        +FirDiagnosticsDirectives.ENABLE_PLUGIN_PHASES
+        +FirDiagnosticsDirectives.FIR_DUMP
+        +JvmEnvironmentConfigurationDirectives.FULL_JDK
+    }
+
+    useConfigurators(
+        ::ExtensionRegistrarConfigurator,
+    )
+}
diff --git a/compiler-plugin/src/test/kotlin/org/jetbrains/kotlinx/spark/api/compilerPlugin/services/ExtensionRegistrarConfigurator.kt b/compiler-plugin/src/test/kotlin/org/jetbrains/kotlinx/spark/api/compilerPlugin/services/ExtensionRegistrarConfigurator.kt
new file mode 100644
index 00000000..a48fa81b
--- /dev/null
+++ b/compiler-plugin/src/test/kotlin/org/jetbrains/kotlinx/spark/api/compilerPlugin/services/ExtensionRegistrarConfigurator.kt
@@ -0,0 +1,39 @@
+package org.jetbrains.kotlinx.spark.api.compilerPlugin.services
+
+import org.jetbrains.kotlin.backend.common.extensions.IrGenerationExtension
+import org.jetbrains.kotlin.compiler.plugin.CompilerPluginRegistrar
+import org.jetbrains.kotlin.config.CompilerConfiguration
+import org.jetbrains.kotlin.fir.extensions.FirExtensionRegistrarAdapter
+import org.jetbrains.kotlin.test.model.TestModule
+import org.jetbrains.kotlin.test.services.EnvironmentConfigurator
+import org.jetbrains.kotlin.test.services.TestServices
+import org.jetbrains.kotlinx.spark.api.compilerPlugin.SparkifyFirPluginRegistrar
+import org.jetbrains.kotlinx.spark.api.compilerPlugin.ir.SparkifyIrGenerationExtension
+
+class ExtensionRegistrarConfigurator(testServices: TestServices) : EnvironmentConfigurator(testServices) {
+    override fun CompilerPluginRegistrar.ExtensionStorage.registerCompilerExtensions(
+        module: TestModule,
+        configuration: CompilerConfiguration,
+    ) {
+        val sparkifyAnnotationFqNames = listOf("foo.bar.Sparkify")
+        val columnNameAnnotationFqNames = listOf("foo.bar.ColumnName")
+        val productFqNames = listOf("foo.bar.Product")
+
+        // Front end (FIR)
+//        FirExtensionRegistrarAdapter.registerExtension(
+//            SparkifyFirPluginRegistrar(
+//                sparkifyAnnotationFqNames = sparkifyAnnotationFqNames,
+//                productFqNames = productFqNames,
+//            )
+//        )
+
+        // Intermediate Representation IR
+        IrGenerationExtension.registerExtension(
+            SparkifyIrGenerationExtension(
+                sparkifyAnnotationFqNames = sparkifyAnnotationFqNames,
+                columnNameAnnotationFqNames = columnNameAnnotationFqNames,
+                productFqNames = productFqNames,
+            )
+        )
+    }
+}
diff --git a/compiler-plugin/src/test/resources/testData/box/dataClassInFunctionTest.fir.ir.txt b/compiler-plugin/src/test/resources/testData/box/dataClassInFunctionTest.fir.ir.txt
new file mode 100644
index 00000000..edde60a0
--- /dev/null
+++ b/compiler-plugin/src/test/resources/testData/box/dataClassInFunctionTest.fir.ir.txt
@@ -0,0 +1,469 @@
+FILE fqName:foo.bar fileName:/dataClassInFunctionTest.kt
+  CLASS ANNOTATION_CLASS name:ColumnName modality:OPEN visibility:public superTypes:[kotlin.Annotation]
+    $this: VALUE_PARAMETER INSTANCE_RECEIVER name:<this> type:foo.bar.ColumnName
+    PROPERTY name:name visibility:public modality:FINAL [val]
+      FIELD PROPERTY_BACKING_FIELD name:name type:kotlin.String visibility:private [final]
+        EXPRESSION_BODY
+          GET_VAR 'name: kotlin.String declared in foo.bar.ColumnName.<init>' type=kotlin.String origin=INITIALIZE_PROPERTY_FROM_PARAMETER
+      FUN DEFAULT_PROPERTY_ACCESSOR name:<get-name> visibility:public modality:FINAL <> ($this:foo.bar.ColumnName) returnType:kotlin.String
+        correspondingProperty: PROPERTY name:name visibility:public modality:FINAL [val]
+        $this: VALUE_PARAMETER name:<this> type:foo.bar.ColumnName
+        BLOCK_BODY
+          RETURN type=kotlin.Nothing from='public final fun <get-name> (): kotlin.String declared in foo.bar.ColumnName'
+            GET_FIELD 'FIELD PROPERTY_BACKING_FIELD name:name type:kotlin.String visibility:private [final]' type=kotlin.String origin=null
+              receiver: GET_VAR '<this>: foo.bar.ColumnName declared in foo.bar.ColumnName.<get-name>' type=foo.bar.ColumnName origin=null
+    CONSTRUCTOR visibility:public <> (name:kotlin.String) returnType:foo.bar.ColumnName [primary]
+      VALUE_PARAMETER name:name index:0 type:kotlin.String
+      BLOCK_BODY
+        DELEGATING_CONSTRUCTOR_CALL 'public constructor <init> () declared in kotlin.Any'
+        INSTANCE_INITIALIZER_CALL classDescriptor='CLASS ANNOTATION_CLASS name:ColumnName modality:OPEN visibility:public superTypes:[kotlin.Annotation]'
+    FUN FAKE_OVERRIDE name:equals visibility:public modality:OPEN <> ($this:kotlin.Any, other:kotlin.Any?) returnType:kotlin.Boolean [fake_override,operator]
+      overridden:
+        public open fun equals (other: kotlin.Any?): kotlin.Boolean declared in kotlin.Annotation
+      $this: VALUE_PARAMETER name:<this> type:kotlin.Any
+      VALUE_PARAMETER name:other index:0 type:kotlin.Any?
+    FUN FAKE_OVERRIDE name:hashCode visibility:public modality:OPEN <> ($this:kotlin.Any) returnType:kotlin.Int [fake_override]
+      overridden:
+        public open fun hashCode (): kotlin.Int declared in kotlin.Annotation
+      $this: VALUE_PARAMETER name:<this> type:kotlin.Any
+    FUN FAKE_OVERRIDE name:toString visibility:public modality:OPEN <> ($this:kotlin.Any) returnType:kotlin.String [fake_override]
+      overridden:
+        public open fun toString (): kotlin.String declared in kotlin.Annotation
+      $this: VALUE_PARAMETER name:<this> type:kotlin.Any
+  CLASS ANNOTATION_CLASS name:Sparkify modality:OPEN visibility:public superTypes:[kotlin.Annotation]
+    $this: VALUE_PARAMETER INSTANCE_RECEIVER name:<this> type:foo.bar.Sparkify
+    CONSTRUCTOR visibility:public <> () returnType:foo.bar.Sparkify [primary]
+      BLOCK_BODY
+        DELEGATING_CONSTRUCTOR_CALL 'public constructor <init> () declared in kotlin.Any'
+        INSTANCE_INITIALIZER_CALL classDescriptor='CLASS ANNOTATION_CLASS name:Sparkify modality:OPEN visibility:public superTypes:[kotlin.Annotation]'
+    FUN FAKE_OVERRIDE name:equals visibility:public modality:OPEN <> ($this:kotlin.Any, other:kotlin.Any?) returnType:kotlin.Boolean [fake_override,operator]
+      overridden:
+        public open fun equals (other: kotlin.Any?): kotlin.Boolean declared in kotlin.Annotation
+      $this: VALUE_PARAMETER name:<this> type:kotlin.Any
+      VALUE_PARAMETER name:other index:0 type:kotlin.Any?
+    FUN FAKE_OVERRIDE name:hashCode visibility:public modality:OPEN <> ($this:kotlin.Any) returnType:kotlin.Int [fake_override]
+      overridden:
+        public open fun hashCode (): kotlin.Int declared in kotlin.Annotation
+      $this: VALUE_PARAMETER name:<this> type:kotlin.Any
+    FUN FAKE_OVERRIDE name:toString visibility:public modality:OPEN <> ($this:kotlin.Any) returnType:kotlin.String [fake_override]
+      overridden:
+        public open fun toString (): kotlin.String declared in kotlin.Annotation
+      $this: VALUE_PARAMETER name:<this> type:kotlin.Any
+  CLASS INTERFACE name:Equals modality:ABSTRACT visibility:public superTypes:[kotlin.Any]
+    $this: VALUE_PARAMETER INSTANCE_RECEIVER name:<this> type:foo.bar.Equals
+    FUN FAKE_OVERRIDE name:equals visibility:public modality:OPEN <> ($this:kotlin.Any, other:kotlin.Any?) returnType:kotlin.Boolean [fake_override,operator]
+      overridden:
+        public open fun equals (other: kotlin.Any?): kotlin.Boolean declared in kotlin.Any
+      $this: VALUE_PARAMETER name:<this> type:kotlin.Any
+      VALUE_PARAMETER name:other index:0 type:kotlin.Any?
+    FUN FAKE_OVERRIDE name:hashCode visibility:public modality:OPEN <> ($this:kotlin.Any) returnType:kotlin.Int [fake_override]
+      overridden:
+        public open fun hashCode (): kotlin.Int declared in kotlin.Any
+      $this: VALUE_PARAMETER name:<this> type:kotlin.Any
+    FUN FAKE_OVERRIDE name:toString visibility:public modality:OPEN <> ($this:kotlin.Any) returnType:kotlin.String [fake_override]
+      overridden:
+        public open fun toString (): kotlin.String declared in kotlin.Any
+      $this: VALUE_PARAMETER name:<this> type:kotlin.Any
+    FUN name:canEqual visibility:public modality:ABSTRACT <> ($this:foo.bar.Equals, that:kotlin.Any?) returnType:kotlin.Boolean
+      $this: VALUE_PARAMETER name:<this> type:foo.bar.Equals
+      VALUE_PARAMETER name:that index:0 type:kotlin.Any?
+  CLASS INTERFACE name:Product modality:ABSTRACT visibility:public superTypes:[foo.bar.Equals]
+    $this: VALUE_PARAMETER INSTANCE_RECEIVER name:<this> type:foo.bar.Product
+    FUN FAKE_OVERRIDE name:canEqual visibility:public modality:ABSTRACT <> ($this:foo.bar.Equals, that:kotlin.Any?) returnType:kotlin.Boolean [fake_override]
+      overridden:
+        public abstract fun canEqual (that: kotlin.Any?): kotlin.Boolean declared in foo.bar.Equals
+      $this: VALUE_PARAMETER name:<this> type:foo.bar.Equals
+      VALUE_PARAMETER name:that index:0 type:kotlin.Any?
+    FUN FAKE_OVERRIDE name:equals visibility:public modality:OPEN <> ($this:kotlin.Any, other:kotlin.Any?) returnType:kotlin.Boolean [fake_override,operator]
+      overridden:
+        public open fun equals (other: kotlin.Any?): kotlin.Boolean declared in foo.bar.Equals
+      $this: VALUE_PARAMETER name:<this> type:kotlin.Any
+      VALUE_PARAMETER name:other index:0 type:kotlin.Any?
+    FUN FAKE_OVERRIDE name:hashCode visibility:public modality:OPEN <> ($this:kotlin.Any) returnType:kotlin.Int [fake_override]
+      overridden:
+        public open fun hashCode (): kotlin.Int declared in foo.bar.Equals
+      $this: VALUE_PARAMETER name:<this> type:kotlin.Any
+    FUN FAKE_OVERRIDE name:toString visibility:public modality:OPEN <> ($this:kotlin.Any) returnType:kotlin.String [fake_override]
+      overridden:
+        public open fun toString (): kotlin.String declared in foo.bar.Equals
+      $this: VALUE_PARAMETER name:<this> type:kotlin.Any
+    FUN name:productArity visibility:public modality:ABSTRACT <> ($this:foo.bar.Product) returnType:kotlin.Int
+      $this: VALUE_PARAMETER name:<this> type:foo.bar.Product
+    FUN name:productElement visibility:public modality:ABSTRACT <> ($this:foo.bar.Product, n:kotlin.Int) returnType:kotlin.Any
+      $this: VALUE_PARAMETER name:<this> type:foo.bar.Product
+      VALUE_PARAMETER name:n index:0 type:kotlin.Int
+  FUN name:box visibility:public modality:FINAL <> () returnType:kotlin.String
+    BLOCK_BODY
+      CLASS CLASS name:User modality:FINAL visibility:local [data] superTypes:[kotlin.Any; foo.bar.Product; java.io.Serializable]
+        annotations:
+          Sparkify
+        $this: VALUE_PARAMETER INSTANCE_RECEIVER name:<this> type:foo.bar.box.User
+        PROPERTY name:name visibility:public modality:FINAL [val]
+          FIELD PROPERTY_BACKING_FIELD name:name type:kotlin.String visibility:private [final]
+            EXPRESSION_BODY
+              GET_VAR 'name: kotlin.String declared in foo.bar.box.User.<init>' type=kotlin.String origin=INITIALIZE_PROPERTY_FROM_PARAMETER
+          FUN DEFAULT_PROPERTY_ACCESSOR name:<get-name> visibility:public modality:FINAL <> ($this:foo.bar.box.User) returnType:kotlin.String
+            annotations:
+              JvmName(name = "name")
+            correspondingProperty: PROPERTY name:name visibility:public modality:FINAL [val]
+            $this: VALUE_PARAMETER name:<this> type:foo.bar.box.User
+            BLOCK_BODY
+              RETURN type=kotlin.Nothing from='public final fun <get-name> (): kotlin.String declared in foo.bar.box.User'
+                GET_FIELD 'FIELD PROPERTY_BACKING_FIELD name:name type:kotlin.String visibility:private [final]' type=kotlin.String origin=null
+                  receiver: GET_VAR '<this>: foo.bar.box.User declared in foo.bar.box.User.<get-name>' type=foo.bar.box.User origin=null
+        PROPERTY name:age visibility:public modality:FINAL [val]
+          FIELD PROPERTY_BACKING_FIELD name:age type:kotlin.Int visibility:private [final]
+            EXPRESSION_BODY
+              GET_VAR 'age: kotlin.Int declared in foo.bar.box.User.<init>' type=kotlin.Int origin=INITIALIZE_PROPERTY_FROM_PARAMETER
+          FUN DEFAULT_PROPERTY_ACCESSOR name:<get-age> visibility:public modality:FINAL <> ($this:foo.bar.box.User) returnType:kotlin.Int
+            annotations:
+              JvmName(name = "age")
+            correspondingProperty: PROPERTY name:age visibility:public modality:FINAL [val]
+            $this: VALUE_PARAMETER name:<this> type:foo.bar.box.User
+            BLOCK_BODY
+              RETURN type=kotlin.Nothing from='public final fun <get-age> (): kotlin.Int declared in foo.bar.box.User'
+                GET_FIELD 'FIELD PROPERTY_BACKING_FIELD name:age type:kotlin.Int visibility:private [final]' type=kotlin.Int origin=null
+                  receiver: GET_VAR '<this>: foo.bar.box.User declared in foo.bar.box.User.<get-age>' type=foo.bar.box.User origin=null
+        PROPERTY name:test visibility:public modality:FINAL [val]
+          FIELD PROPERTY_BACKING_FIELD name:test type:kotlin.Double visibility:private [final]
+            EXPRESSION_BODY
+              GET_VAR 'test: kotlin.Double declared in foo.bar.box.User.<init>' type=kotlin.Double origin=INITIALIZE_PROPERTY_FROM_PARAMETER
+          FUN DEFAULT_PROPERTY_ACCESSOR name:<get-test> visibility:public modality:FINAL <> ($this:foo.bar.box.User) returnType:kotlin.Double
+            annotations:
+              JvmName(name = "a")
+            correspondingProperty: PROPERTY name:test visibility:public modality:FINAL [val]
+            $this: VALUE_PARAMETER name:<this> type:foo.bar.box.User
+            BLOCK_BODY
+              RETURN type=kotlin.Nothing from='public final fun <get-test> (): kotlin.Double declared in foo.bar.box.User'
+                GET_FIELD 'FIELD PROPERTY_BACKING_FIELD name:test type:kotlin.Double visibility:private [final]' type=kotlin.Double origin=null
+                  receiver: GET_VAR '<this>: foo.bar.box.User declared in foo.bar.box.User.<get-test>' type=foo.bar.box.User origin=null
+        PROPERTY name:test2 visibility:public modality:FINAL [val]
+          FIELD PROPERTY_BACKING_FIELD name:test2 type:kotlin.Double visibility:private [final]
+            EXPRESSION_BODY
+              GET_VAR 'test2: kotlin.Double declared in foo.bar.box.User.<init>' type=kotlin.Double origin=INITIALIZE_PROPERTY_FROM_PARAMETER
+          FUN DEFAULT_PROPERTY_ACCESSOR name:<get-test2> visibility:public modality:FINAL <> ($this:foo.bar.box.User) returnType:kotlin.Double
+            annotations:
+              ColumnName(name = "b")
+              JvmName(name = "b")
+            correspondingProperty: PROPERTY name:test2 visibility:public modality:FINAL [val]
+            $this: VALUE_PARAMETER name:<this> type:foo.bar.box.User
+            BLOCK_BODY
+              RETURN type=kotlin.Nothing from='public final fun <get-test2> (): kotlin.Double declared in foo.bar.box.User'
+                GET_FIELD 'FIELD PROPERTY_BACKING_FIELD name:test2 type:kotlin.Double visibility:private [final]' type=kotlin.Double origin=null
+                  receiver: GET_VAR '<this>: foo.bar.box.User declared in foo.bar.box.User.<get-test2>' type=foo.bar.box.User origin=null
+        CONSTRUCTOR visibility:public <> (name:kotlin.String, age:kotlin.Int, test:kotlin.Double, test2:kotlin.Double) returnType:foo.bar.box.User [primary]
+          VALUE_PARAMETER name:name index:0 type:kotlin.String
+            EXPRESSION_BODY
+              CONST String type=kotlin.String value="John Doe"
+          VALUE_PARAMETER name:age index:1 type:kotlin.Int
+            EXPRESSION_BODY
+              CONST Int type=kotlin.Int value=25
+          VALUE_PARAMETER name:test index:2 type:kotlin.Double
+            annotations:
+              ColumnName(name = "a")
+            EXPRESSION_BODY
+              CONST Double type=kotlin.Double value=1.0
+          VALUE_PARAMETER name:test2 index:3 type:kotlin.Double
+            EXPRESSION_BODY
+              CONST Double type=kotlin.Double value=2.0
+          BLOCK_BODY
+            DELEGATING_CONSTRUCTOR_CALL 'public constructor <init> () declared in kotlin.Any'
+            INSTANCE_INITIALIZER_CALL classDescriptor='CLASS CLASS name:User modality:FINAL visibility:local [data] superTypes:[kotlin.Any; foo.bar.Product; java.io.Serializable]'
+        FUN GENERATED_DATA_CLASS_MEMBER name:component1 visibility:public modality:FINAL <> ($this:foo.bar.box.User) returnType:kotlin.String [operator]
+          $this: VALUE_PARAMETER name:<this> type:foo.bar.box.User
+          BLOCK_BODY
+            RETURN type=kotlin.Nothing from='public final fun component1 (): kotlin.String declared in foo.bar.box.User'
+              GET_FIELD 'FIELD PROPERTY_BACKING_FIELD name:name type:kotlin.String visibility:private [final]' type=kotlin.String origin=null
+                receiver: GET_VAR '<this>: foo.bar.box.User declared in foo.bar.box.User.component1' type=foo.bar.box.User origin=null
+        FUN GENERATED_DATA_CLASS_MEMBER name:component2 visibility:public modality:FINAL <> ($this:foo.bar.box.User) returnType:kotlin.Int [operator]
+          $this: VALUE_PARAMETER name:<this> type:foo.bar.box.User
+          BLOCK_BODY
+            RETURN type=kotlin.Nothing from='public final fun component2 (): kotlin.Int declared in foo.bar.box.User'
+              GET_FIELD 'FIELD PROPERTY_BACKING_FIELD name:age type:kotlin.Int visibility:private [final]' type=kotlin.Int origin=null
+                receiver: GET_VAR '<this>: foo.bar.box.User declared in foo.bar.box.User.component2' type=foo.bar.box.User origin=null
+        FUN GENERATED_DATA_CLASS_MEMBER name:component3 visibility:public modality:FINAL <> ($this:foo.bar.box.User) returnType:kotlin.Double [operator]
+          $this: VALUE_PARAMETER name:<this> type:foo.bar.box.User
+          BLOCK_BODY
+            RETURN type=kotlin.Nothing from='public final fun component3 (): kotlin.Double declared in foo.bar.box.User'
+              GET_FIELD 'FIELD PROPERTY_BACKING_FIELD name:test type:kotlin.Double visibility:private [final]' type=kotlin.Double origin=null
+                receiver: GET_VAR '<this>: foo.bar.box.User declared in foo.bar.box.User.component3' type=foo.bar.box.User origin=null
+        FUN GENERATED_DATA_CLASS_MEMBER name:component4 visibility:public modality:FINAL <> ($this:foo.bar.box.User) returnType:kotlin.Double [operator]
+          $this: VALUE_PARAMETER name:<this> type:foo.bar.box.User
+          BLOCK_BODY
+            RETURN type=kotlin.Nothing from='public final fun component4 (): kotlin.Double declared in foo.bar.box.User'
+              GET_FIELD 'FIELD PROPERTY_BACKING_FIELD name:test2 type:kotlin.Double visibility:private [final]' type=kotlin.Double origin=null
+                receiver: GET_VAR '<this>: foo.bar.box.User declared in foo.bar.box.User.component4' type=foo.bar.box.User origin=null
+        FUN GENERATED_DATA_CLASS_MEMBER name:copy visibility:public modality:FINAL <> ($this:foo.bar.box.User, name:kotlin.String, age:kotlin.Int, test:kotlin.Double, test2:kotlin.Double) returnType:foo.bar.box.User
+          $this: VALUE_PARAMETER name:<this> type:foo.bar.box.User
+          VALUE_PARAMETER name:name index:0 type:kotlin.String
+            EXPRESSION_BODY
+              GET_FIELD 'FIELD PROPERTY_BACKING_FIELD name:name type:kotlin.String visibility:private [final]' type=kotlin.String origin=null
+                receiver: GET_VAR '<this>: foo.bar.box.User declared in foo.bar.box.User.copy' type=foo.bar.box.User origin=null
+          VALUE_PARAMETER name:age index:1 type:kotlin.Int
+            EXPRESSION_BODY
+              GET_FIELD 'FIELD PROPERTY_BACKING_FIELD name:age type:kotlin.Int visibility:private [final]' type=kotlin.Int origin=null
+                receiver: GET_VAR '<this>: foo.bar.box.User declared in foo.bar.box.User.copy' type=foo.bar.box.User origin=null
+          VALUE_PARAMETER name:test index:2 type:kotlin.Double
+            annotations:
+              ColumnName(name = "a")
+            EXPRESSION_BODY
+              GET_FIELD 'FIELD PROPERTY_BACKING_FIELD name:test type:kotlin.Double visibility:private [final]' type=kotlin.Double origin=null
+                receiver: GET_VAR '<this>: foo.bar.box.User declared in foo.bar.box.User.copy' type=foo.bar.box.User origin=null
+          VALUE_PARAMETER name:test2 index:3 type:kotlin.Double
+            EXPRESSION_BODY
+              GET_FIELD 'FIELD PROPERTY_BACKING_FIELD name:test2 type:kotlin.Double visibility:private [final]' type=kotlin.Double origin=null
+                receiver: GET_VAR '<this>: foo.bar.box.User declared in foo.bar.box.User.copy' type=foo.bar.box.User origin=null
+          BLOCK_BODY
+            RETURN type=kotlin.Nothing from='public final fun copy (name: kotlin.String, age: kotlin.Int, test: kotlin.Double, test2: kotlin.Double): foo.bar.box.User declared in foo.bar.box.User'
+              CONSTRUCTOR_CALL 'public constructor <init> (name: kotlin.String, age: kotlin.Int, test: kotlin.Double, test2: kotlin.Double) declared in foo.bar.box.User' type=foo.bar.box.User origin=null
+                name: GET_VAR 'name: kotlin.String declared in foo.bar.box.User.copy' type=kotlin.String origin=null
+                age: GET_VAR 'age: kotlin.Int declared in foo.bar.box.User.copy' type=kotlin.Int origin=null
+                test: GET_VAR 'test: kotlin.Double declared in foo.bar.box.User.copy' type=kotlin.Double origin=null
+                test2: GET_VAR 'test2: kotlin.Double declared in foo.bar.box.User.copy' type=kotlin.Double origin=null
+        FUN GENERATED_DATA_CLASS_MEMBER name:equals visibility:public modality:OPEN <> ($this:foo.bar.box.User, other:kotlin.Any?) returnType:kotlin.Boolean [operator]
+          overridden:
+            public open fun equals (other: kotlin.Any?): kotlin.Boolean declared in kotlin.Any
+          $this: VALUE_PARAMETER name:<this> type:foo.bar.box.User
+          VALUE_PARAMETER name:other index:0 type:kotlin.Any?
+          BLOCK_BODY
+            WHEN type=kotlin.Unit origin=null
+              BRANCH
+                if: CALL 'public final fun EQEQEQ (arg0: kotlin.Any?, arg1: kotlin.Any?): kotlin.Boolean declared in kotlin.internal.ir' type=kotlin.Boolean origin=EQEQEQ
+                  arg0: GET_VAR '<this>: foo.bar.box.User declared in foo.bar.box.User.equals' type=foo.bar.box.User origin=null
+                  arg1: GET_VAR 'other: kotlin.Any? declared in foo.bar.box.User.equals' type=kotlin.Any? origin=null
+                then: RETURN type=kotlin.Nothing from='public open fun equals (other: kotlin.Any?): kotlin.Boolean declared in foo.bar.box.User'
+                  CONST Boolean type=kotlin.Boolean value=true
+            WHEN type=kotlin.Unit origin=null
+              BRANCH
+                if: TYPE_OP type=kotlin.Boolean origin=NOT_INSTANCEOF typeOperand=foo.bar.box.User
+                  GET_VAR 'other: kotlin.Any? declared in foo.bar.box.User.equals' type=kotlin.Any? origin=null
+                then: RETURN type=kotlin.Nothing from='public open fun equals (other: kotlin.Any?): kotlin.Boolean declared in foo.bar.box.User'
+                  CONST Boolean type=kotlin.Boolean value=false
+            VAR IR_TEMPORARY_VARIABLE name:tmp_0 type:foo.bar.box.User [val]
+              TYPE_OP type=foo.bar.box.User origin=CAST typeOperand=foo.bar.box.User
+                GET_VAR 'other: kotlin.Any? declared in foo.bar.box.User.equals' type=kotlin.Any? origin=null
+            WHEN type=kotlin.Unit origin=null
+              BRANCH
+                if: CALL 'public final fun not (): kotlin.Boolean declared in kotlin.Boolean' type=kotlin.Boolean origin=EXCLEQ
+                  $this: CALL 'public final fun EQEQ (arg0: kotlin.Any?, arg1: kotlin.Any?): kotlin.Boolean declared in kotlin.internal.ir' type=kotlin.Boolean origin=EXCLEQ
+                    arg0: GET_FIELD 'FIELD PROPERTY_BACKING_FIELD name:name type:kotlin.String visibility:private [final]' type=kotlin.String origin=null
+                      receiver: GET_VAR '<this>: foo.bar.box.User declared in foo.bar.box.User.equals' type=foo.bar.box.User origin=null
+                    arg1: GET_FIELD 'FIELD PROPERTY_BACKING_FIELD name:name type:kotlin.String visibility:private [final]' type=kotlin.String origin=null
+                      receiver: GET_VAR 'val tmp_0: foo.bar.box.User declared in foo.bar.box.User.equals' type=foo.bar.box.User origin=null
+                then: RETURN type=kotlin.Nothing from='public open fun equals (other: kotlin.Any?): kotlin.Boolean declared in foo.bar.box.User'
+                  CONST Boolean type=kotlin.Boolean value=false
+            WHEN type=kotlin.Unit origin=null
+              BRANCH
+                if: CALL 'public final fun not (): kotlin.Boolean declared in kotlin.Boolean' type=kotlin.Boolean origin=EXCLEQ
+                  $this: CALL 'public final fun EQEQ (arg0: kotlin.Any?, arg1: kotlin.Any?): kotlin.Boolean declared in kotlin.internal.ir' type=kotlin.Boolean origin=EXCLEQ
+                    arg0: GET_FIELD 'FIELD PROPERTY_BACKING_FIELD name:age type:kotlin.Int visibility:private [final]' type=kotlin.Int origin=null
+                      receiver: GET_VAR '<this>: foo.bar.box.User declared in foo.bar.box.User.equals' type=foo.bar.box.User origin=null
+                    arg1: GET_FIELD 'FIELD PROPERTY_BACKING_FIELD name:age type:kotlin.Int visibility:private [final]' type=kotlin.Int origin=null
+                      receiver: GET_VAR 'val tmp_0: foo.bar.box.User declared in foo.bar.box.User.equals' type=foo.bar.box.User origin=null
+                then: RETURN type=kotlin.Nothing from='public open fun equals (other: kotlin.Any?): kotlin.Boolean declared in foo.bar.box.User'
+                  CONST Boolean type=kotlin.Boolean value=false
+            WHEN type=kotlin.Unit origin=null
+              BRANCH
+                if: CALL 'public final fun not (): kotlin.Boolean declared in kotlin.Boolean' type=kotlin.Boolean origin=EXCLEQ
+                  $this: CALL 'public final fun EQEQ (arg0: kotlin.Any?, arg1: kotlin.Any?): kotlin.Boolean declared in kotlin.internal.ir' type=kotlin.Boolean origin=EXCLEQ
+                    arg0: GET_FIELD 'FIELD PROPERTY_BACKING_FIELD name:test type:kotlin.Double visibility:private [final]' type=kotlin.Double origin=null
+                      receiver: GET_VAR '<this>: foo.bar.box.User declared in foo.bar.box.User.equals' type=foo.bar.box.User origin=null
+                    arg1: GET_FIELD 'FIELD PROPERTY_BACKING_FIELD name:test type:kotlin.Double visibility:private [final]' type=kotlin.Double origin=null
+                      receiver: GET_VAR 'val tmp_0: foo.bar.box.User declared in foo.bar.box.User.equals' type=foo.bar.box.User origin=null
+                then: RETURN type=kotlin.Nothing from='public open fun equals (other: kotlin.Any?): kotlin.Boolean declared in foo.bar.box.User'
+                  CONST Boolean type=kotlin.Boolean value=false
+            WHEN type=kotlin.Unit origin=null
+              BRANCH
+                if: CALL 'public final fun not (): kotlin.Boolean declared in kotlin.Boolean' type=kotlin.Boolean origin=EXCLEQ
+                  $this: CALL 'public final fun EQEQ (arg0: kotlin.Any?, arg1: kotlin.Any?): kotlin.Boolean declared in kotlin.internal.ir' type=kotlin.Boolean origin=EXCLEQ
+                    arg0: GET_FIELD 'FIELD PROPERTY_BACKING_FIELD name:test2 type:kotlin.Double visibility:private [final]' type=kotlin.Double origin=null
+                      receiver: GET_VAR '<this>: foo.bar.box.User declared in foo.bar.box.User.equals' type=foo.bar.box.User origin=null
+                    arg1: GET_FIELD 'FIELD PROPERTY_BACKING_FIELD name:test2 type:kotlin.Double visibility:private [final]' type=kotlin.Double origin=null
+                      receiver: GET_VAR 'val tmp_0: foo.bar.box.User declared in foo.bar.box.User.equals' type=foo.bar.box.User origin=null
+                then: RETURN type=kotlin.Nothing from='public open fun equals (other: kotlin.Any?): kotlin.Boolean declared in foo.bar.box.User'
+                  CONST Boolean type=kotlin.Boolean value=false
+            RETURN type=kotlin.Nothing from='public open fun equals (other: kotlin.Any?): kotlin.Boolean declared in foo.bar.box.User'
+              CONST Boolean type=kotlin.Boolean value=true
+        FUN GENERATED_DATA_CLASS_MEMBER name:hashCode visibility:public modality:OPEN <> ($this:foo.bar.box.User) returnType:kotlin.Int
+          overridden:
+            public open fun hashCode (): kotlin.Int declared in kotlin.Any
+          $this: VALUE_PARAMETER name:<this> type:foo.bar.box.User
+          BLOCK_BODY
+            VAR name:result type:kotlin.Int [var]
+              CALL 'public open fun hashCode (): kotlin.Int declared in kotlin.String' type=kotlin.Int origin=null
+                $this: GET_FIELD 'FIELD PROPERTY_BACKING_FIELD name:name type:kotlin.String visibility:private [final]' type=kotlin.String origin=null
+                  receiver: GET_VAR '<this>: foo.bar.box.User declared in foo.bar.box.User.hashCode' type=foo.bar.box.User origin=null
+            SET_VAR 'var result: kotlin.Int declared in foo.bar.box.User.hashCode' type=kotlin.Unit origin=EQ
+              CALL 'public final fun plus (other: kotlin.Int): kotlin.Int declared in kotlin.Int' type=kotlin.Int origin=null
+                $this: CALL 'public final fun times (other: kotlin.Int): kotlin.Int declared in kotlin.Int' type=kotlin.Int origin=null
+                  $this: GET_VAR 'var result: kotlin.Int declared in foo.bar.box.User.hashCode' type=kotlin.Int origin=null
+                  other: CONST Int type=kotlin.Int value=31
+                other: CALL 'public open fun hashCode (): kotlin.Int declared in kotlin.Int' type=kotlin.Int origin=null
+                  $this: GET_FIELD 'FIELD PROPERTY_BACKING_FIELD name:age type:kotlin.Int visibility:private [final]' type=kotlin.Int origin=null
+                    receiver: GET_VAR '<this>: foo.bar.box.User declared in foo.bar.box.User.hashCode' type=foo.bar.box.User origin=null
+            SET_VAR 'var result: kotlin.Int declared in foo.bar.box.User.hashCode' type=kotlin.Unit origin=EQ
+              CALL 'public final fun plus (other: kotlin.Int): kotlin.Int declared in kotlin.Int' type=kotlin.Int origin=null
+                $this: CALL 'public final fun times (other: kotlin.Int): kotlin.Int declared in kotlin.Int' type=kotlin.Int origin=null
+                  $this: GET_VAR 'var result: kotlin.Int declared in foo.bar.box.User.hashCode' type=kotlin.Int origin=null
+                  other: CONST Int type=kotlin.Int value=31
+                other: CALL 'public open fun hashCode (): kotlin.Int declared in kotlin.Double' type=kotlin.Int origin=null
+                  $this: GET_FIELD 'FIELD PROPERTY_BACKING_FIELD name:test type:kotlin.Double visibility:private [final]' type=kotlin.Double origin=null
+                    receiver: GET_VAR '<this>: foo.bar.box.User declared in foo.bar.box.User.hashCode' type=foo.bar.box.User origin=null
+            SET_VAR 'var result: kotlin.Int declared in foo.bar.box.User.hashCode' type=kotlin.Unit origin=EQ
+              CALL 'public final fun plus (other: kotlin.Int): kotlin.Int declared in kotlin.Int' type=kotlin.Int origin=null
+                $this: CALL 'public final fun times (other: kotlin.Int): kotlin.Int declared in kotlin.Int' type=kotlin.Int origin=null
+                  $this: GET_VAR 'var result: kotlin.Int declared in foo.bar.box.User.hashCode' type=kotlin.Int origin=null
+                  other: CONST Int type=kotlin.Int value=31
+                other: CALL 'public open fun hashCode (): kotlin.Int declared in kotlin.Double' type=kotlin.Int origin=null
+                  $this: GET_FIELD 'FIELD PROPERTY_BACKING_FIELD name:test2 type:kotlin.Double visibility:private [final]' type=kotlin.Double origin=null
+                    receiver: GET_VAR '<this>: foo.bar.box.User declared in foo.bar.box.User.hashCode' type=foo.bar.box.User origin=null
+            RETURN type=kotlin.Nothing from='public open fun hashCode (): kotlin.Int declared in foo.bar.box.User'
+              GET_VAR 'var result: kotlin.Int declared in foo.bar.box.User.hashCode' type=kotlin.Int origin=null
+        FUN GENERATED_DATA_CLASS_MEMBER name:toString visibility:public modality:OPEN <> ($this:foo.bar.box.User) returnType:kotlin.String
+          overridden:
+            public open fun toString (): kotlin.String declared in kotlin.Any
+          $this: VALUE_PARAMETER name:<this> type:foo.bar.box.User
+          BLOCK_BODY
+            RETURN type=kotlin.Nothing from='public open fun toString (): kotlin.String declared in foo.bar.box.User'
+              STRING_CONCATENATION type=kotlin.String
+                CONST String type=kotlin.String value="User("
+                CONST String type=kotlin.String value="name="
+                GET_FIELD 'FIELD PROPERTY_BACKING_FIELD name:name type:kotlin.String visibility:private [final]' type=kotlin.String origin=null
+                  receiver: GET_VAR '<this>: foo.bar.box.User declared in foo.bar.box.User.toString' type=foo.bar.box.User origin=null
+                CONST String type=kotlin.String value=", "
+                CONST String type=kotlin.String value="age="
+                GET_FIELD 'FIELD PROPERTY_BACKING_FIELD name:age type:kotlin.Int visibility:private [final]' type=kotlin.Int origin=null
+                  receiver: GET_VAR '<this>: foo.bar.box.User declared in foo.bar.box.User.toString' type=foo.bar.box.User origin=null
+                CONST String type=kotlin.String value=", "
+                CONST String type=kotlin.String value="test="
+                GET_FIELD 'FIELD PROPERTY_BACKING_FIELD name:test type:kotlin.Double visibility:private [final]' type=kotlin.Double origin=null
+                  receiver: GET_VAR '<this>: foo.bar.box.User declared in foo.bar.box.User.toString' type=foo.bar.box.User origin=null
+                CONST String type=kotlin.String value=", "
+                CONST String type=kotlin.String value="test2="
+                GET_FIELD 'FIELD PROPERTY_BACKING_FIELD name:test2 type:kotlin.Double visibility:private [final]' type=kotlin.Double origin=null
+                  receiver: GET_VAR '<this>: foo.bar.box.User declared in foo.bar.box.User.toString' type=foo.bar.box.User origin=null
+                CONST String type=kotlin.String value=")"
+        FUN name:canEqual visibility:public modality:OPEN <> ($this:foo.bar.box.User, that:kotlin.Any?) returnType:kotlin.Boolean
+          overridden:
+            public abstract fun canEqual (that: kotlin.Any?): kotlin.Boolean declared in foo.bar.Equals
+          $this: VALUE_PARAMETER name:$this type:foo.bar.box.User
+          VALUE_PARAMETER name:that index:0 type:kotlin.Any?
+          BLOCK_BODY
+            RETURN type=kotlin.Nothing from='public open fun canEqual (that: kotlin.Any?): kotlin.Boolean declared in foo.bar.box.User'
+              TYPE_OP type=kotlin.Boolean origin=INSTANCEOF typeOperand=foo.bar.box.User
+                GET_VAR 'that: kotlin.Any? declared in foo.bar.box.User.canEqual' type=kotlin.Any? origin=null
+        FUN name:productArity visibility:public modality:OPEN <> ($this:foo.bar.box.User) returnType:kotlin.Int
+          overridden:
+            public abstract fun productArity (): kotlin.Int declared in foo.bar.Product
+          $this: VALUE_PARAMETER name:$this type:foo.bar.box.User
+          BLOCK_BODY
+            RETURN type=kotlin.Nothing from='public open fun productArity (): kotlin.Int declared in foo.bar.box.User'
+              CONST Int type=kotlin.Int value=4
+        FUN name:productElement visibility:public modality:OPEN <> ($this:foo.bar.box.User, n:kotlin.Int) returnType:kotlin.Any?
+          overridden:
+            public abstract fun productElement (n: kotlin.Int): kotlin.Any declared in foo.bar.Product
+          $this: VALUE_PARAMETER name:$this type:foo.bar.box.User
+          VALUE_PARAMETER name:n index:0 type:kotlin.Int
+          BLOCK_BODY
+            RETURN type=kotlin.Nothing from='public open fun productElement (n: kotlin.Int): kotlin.Any? declared in foo.bar.box.User'
+              WHEN type=kotlin.Any? origin=IF
+                BRANCH
+                  if: CALL 'public final fun EQEQ (arg0: kotlin.Any?, arg1: kotlin.Any?): kotlin.Boolean declared in kotlin.internal.ir' type=kotlin.Boolean origin=EQEQ
+                    arg0: GET_VAR 'n: kotlin.Int declared in foo.bar.box.User.productElement' type=kotlin.Int origin=null
+                    arg1: CONST Int type=kotlin.Int value=0
+                  then: CALL 'public final fun <get-name> (): kotlin.String declared in foo.bar.box.User' type=kotlin.String origin=GET_PROPERTY
+                    $this: GET_VAR '$this: foo.bar.box.User declared in foo.bar.box.User.productElement' type=foo.bar.box.User origin=null
+                BRANCH
+                  if: CALL 'public final fun EQEQ (arg0: kotlin.Any?, arg1: kotlin.Any?): kotlin.Boolean declared in kotlin.internal.ir' type=kotlin.Boolean origin=EQEQ
+                    arg0: GET_VAR 'n: kotlin.Int declared in foo.bar.box.User.productElement' type=kotlin.Int origin=null
+                    arg1: CONST Int type=kotlin.Int value=1
+                  then: CALL 'public final fun <get-age> (): kotlin.Int declared in foo.bar.box.User' type=kotlin.Int origin=GET_PROPERTY
+                    $this: GET_VAR '$this: foo.bar.box.User declared in foo.bar.box.User.productElement' type=foo.bar.box.User origin=null
+                BRANCH
+                  if: CALL 'public final fun EQEQ (arg0: kotlin.Any?, arg1: kotlin.Any?): kotlin.Boolean declared in kotlin.internal.ir' type=kotlin.Boolean origin=EQEQ
+                    arg0: GET_VAR 'n: kotlin.Int declared in foo.bar.box.User.productElement' type=kotlin.Int origin=null
+                    arg1: CONST Int type=kotlin.Int value=2
+                  then: CALL 'public final fun <get-test> (): kotlin.Double declared in foo.bar.box.User' type=kotlin.Double origin=GET_PROPERTY
+                    $this: GET_VAR '$this: foo.bar.box.User declared in foo.bar.box.User.productElement' type=foo.bar.box.User origin=null
+                BRANCH
+                  if: CALL 'public final fun EQEQ (arg0: kotlin.Any?, arg1: kotlin.Any?): kotlin.Boolean declared in kotlin.internal.ir' type=kotlin.Boolean origin=EQEQ
+                    arg0: GET_VAR 'n: kotlin.Int declared in foo.bar.box.User.productElement' type=kotlin.Int origin=null
+                    arg1: CONST Int type=kotlin.Int value=3
+                  then: CALL 'public final fun <get-test2> (): kotlin.Double declared in foo.bar.box.User' type=kotlin.Double origin=GET_PROPERTY
+                    $this: GET_VAR '$this: foo.bar.box.User declared in foo.bar.box.User.productElement' type=foo.bar.box.User origin=null
+                BRANCH
+                  if: CONST Boolean type=kotlin.Boolean value=true
+                  then: THROW type=kotlin.Nothing
+                    CONSTRUCTOR_CALL 'public constructor <init> () declared in java.lang.IndexOutOfBoundsException' type=java.lang.IndexOutOfBoundsException origin=null
+      VAR name:user type:foo.bar.box.User [val]
+        CONSTRUCTOR_CALL 'public constructor <init> (name: kotlin.String, age: kotlin.Int, test: kotlin.Double, test2: kotlin.Double) declared in foo.bar.box.User' type=foo.bar.box.User origin=null
+      VAR name:name type:@[FlexibleNullability] kotlin.Any? [val]
+        CALL 'public open fun invoke (p0: @[FlexibleNullability] kotlin.Any?, vararg p1: @[FlexibleNullability] kotlin.Any?): @[FlexibleNullability] kotlin.Any? declared in java.lang.reflect.Method' type=@[FlexibleNullability] kotlin.Any? origin=null
+          $this: CALL 'public open fun getMethod (p0: @[FlexibleNullability] kotlin.String?, vararg p1: @[FlexibleNullability] java.lang.Class<*>?): @[FlexibleNullability] java.lang.reflect.Method? declared in java.lang.Class' type=@[FlexibleNullability] java.lang.reflect.Method? origin=null
+            $this: CALL 'public final fun <get-java> <T> (): java.lang.Class<T of kotlin.jvm.<get-java>> declared in kotlin.jvm' type=java.lang.Class<foo.bar.box.User> origin=GET_PROPERTY
+              <T>: foo.bar.box.User
+              $receiver: CLASS_REFERENCE 'CLASS CLASS name:User modality:FINAL visibility:local [data] superTypes:[kotlin.Any; foo.bar.Product; java.io.Serializable]' type=kotlin.reflect.KClass<foo.bar.box.User>
+            p0: CONST String type=kotlin.String value="name"
+          p0: GET_VAR 'val user: foo.bar.box.User declared in foo.bar.box' type=foo.bar.box.User origin=null
+      VAR name:age type:@[FlexibleNullability] kotlin.Any? [val]
+        CALL 'public open fun invoke (p0: @[FlexibleNullability] kotlin.Any?, vararg p1: @[FlexibleNullability] kotlin.Any?): @[FlexibleNullability] kotlin.Any? declared in java.lang.reflect.Method' type=@[FlexibleNullability] kotlin.Any? origin=null
+          $this: CALL 'public open fun getMethod (p0: @[FlexibleNullability] kotlin.String?, vararg p1: @[FlexibleNullability] java.lang.Class<*>?): @[FlexibleNullability] java.lang.reflect.Method? declared in java.lang.Class' type=@[FlexibleNullability] java.lang.reflect.Method? origin=null
+            $this: CALL 'public final fun <get-java> <T> (): java.lang.Class<T of kotlin.jvm.<get-java>> declared in kotlin.jvm' type=java.lang.Class<foo.bar.box.User> origin=GET_PROPERTY
+              <T>: foo.bar.box.User
+              $receiver: CLASS_REFERENCE 'CLASS CLASS name:User modality:FINAL visibility:local [data] superTypes:[kotlin.Any; foo.bar.Product; java.io.Serializable]' type=kotlin.reflect.KClass<foo.bar.box.User>
+            p0: CONST String type=kotlin.String value="age"
+          p0: GET_VAR 'val user: foo.bar.box.User declared in foo.bar.box' type=foo.bar.box.User origin=null
+      VAR name:a type:@[FlexibleNullability] kotlin.Any? [val]
+        CALL 'public open fun invoke (p0: @[FlexibleNullability] kotlin.Any?, vararg p1: @[FlexibleNullability] kotlin.Any?): @[FlexibleNullability] kotlin.Any? declared in java.lang.reflect.Method' type=@[FlexibleNullability] kotlin.Any? origin=null
+          $this: CALL 'public open fun getMethod (p0: @[FlexibleNullability] kotlin.String?, vararg p1: @[FlexibleNullability] java.lang.Class<*>?): @[FlexibleNullability] java.lang.reflect.Method? declared in java.lang.Class' type=@[FlexibleNullability] java.lang.reflect.Method? origin=null
+            $this: CALL 'public final fun <get-java> <T> (): java.lang.Class<T of kotlin.jvm.<get-java>> declared in kotlin.jvm' type=java.lang.Class<foo.bar.box.User> origin=GET_PROPERTY
+              <T>: foo.bar.box.User
+              $receiver: CLASS_REFERENCE 'CLASS CLASS name:User modality:FINAL visibility:local [data] superTypes:[kotlin.Any; foo.bar.Product; java.io.Serializable]' type=kotlin.reflect.KClass<foo.bar.box.User>
+            p0: CONST String type=kotlin.String value="a"
+          p0: GET_VAR 'val user: foo.bar.box.User declared in foo.bar.box' type=foo.bar.box.User origin=null
+      VAR name:b type:@[FlexibleNullability] kotlin.Any? [val]
+        CALL 'public open fun invoke (p0: @[FlexibleNullability] kotlin.Any?, vararg p1: @[FlexibleNullability] kotlin.Any?): @[FlexibleNullability] kotlin.Any? declared in java.lang.reflect.Method' type=@[FlexibleNullability] kotlin.Any? origin=null
+          $this: CALL 'public open fun getMethod (p0: @[FlexibleNullability] kotlin.String?, vararg p1: @[FlexibleNullability] java.lang.Class<*>?): @[FlexibleNullability] java.lang.reflect.Method? declared in java.lang.Class' type=@[FlexibleNullability] java.lang.reflect.Method? origin=null
+            $this: CALL 'public final fun <get-java> <T> (): java.lang.Class<T of kotlin.jvm.<get-java>> declared in kotlin.jvm' type=java.lang.Class<foo.bar.box.User> origin=GET_PROPERTY
+              <T>: foo.bar.box.User
+              $receiver: CLASS_REFERENCE 'CLASS CLASS name:User modality:FINAL visibility:local [data] superTypes:[kotlin.Any; foo.bar.Product; java.io.Serializable]' type=kotlin.reflect.KClass<foo.bar.box.User>
+            p0: CONST String type=kotlin.String value="b"
+          p0: GET_VAR 'val user: foo.bar.box.User declared in foo.bar.box' type=foo.bar.box.User origin=null
+      WHEN type=kotlin.Unit origin=IF
+        BRANCH
+          if: WHEN type=kotlin.Boolean origin=OROR
+            BRANCH
+              if: WHEN type=kotlin.Boolean origin=OROR
+                BRANCH
+                  if: WHEN type=kotlin.Boolean origin=OROR
+                    BRANCH
+                      if: CALL 'public final fun not (): kotlin.Boolean declared in kotlin.Boolean' type=kotlin.Boolean origin=EXCLEQ
+                        $this: CALL 'public final fun EQEQ (arg0: kotlin.Any?, arg1: kotlin.Any?): kotlin.Boolean declared in kotlin.internal.ir' type=kotlin.Boolean origin=EXCLEQ
+                          arg0: GET_VAR 'val name: @[FlexibleNullability] kotlin.Any? declared in foo.bar.box' type=@[FlexibleNullability] kotlin.Any? origin=null
+                          arg1: CONST String type=kotlin.String value="John Doe"
+                      then: CONST Boolean type=kotlin.Boolean value=true
+                    BRANCH
+                      if: CONST Boolean type=kotlin.Boolean value=true
+                      then: CALL 'public final fun not (): kotlin.Boolean declared in kotlin.Boolean' type=kotlin.Boolean origin=EXCLEQ
+                        $this: CALL 'public final fun EQEQ (arg0: kotlin.Any?, arg1: kotlin.Any?): kotlin.Boolean declared in kotlin.internal.ir' type=kotlin.Boolean origin=EXCLEQ
+                          arg0: GET_VAR 'val age: @[FlexibleNullability] kotlin.Any? declared in foo.bar.box' type=@[FlexibleNullability] kotlin.Any? origin=null
+                          arg1: CONST Int type=kotlin.Int value=25
+                  then: CONST Boolean type=kotlin.Boolean value=true
+                BRANCH
+                  if: CONST Boolean type=kotlin.Boolean value=true
+                  then: CALL 'public final fun not (): kotlin.Boolean declared in kotlin.Boolean' type=kotlin.Boolean origin=EXCLEQ
+                    $this: CALL 'public final fun EQEQ (arg0: kotlin.Any?, arg1: kotlin.Any?): kotlin.Boolean declared in kotlin.internal.ir' type=kotlin.Boolean origin=EXCLEQ
+                      arg0: GET_VAR 'val a: @[FlexibleNullability] kotlin.Any? declared in foo.bar.box' type=@[FlexibleNullability] kotlin.Any? origin=null
+                      arg1: CONST Double type=kotlin.Double value=1.0
+              then: CONST Boolean type=kotlin.Boolean value=true
+            BRANCH
+              if: CONST Boolean type=kotlin.Boolean value=true
+              then: CALL 'public final fun not (): kotlin.Boolean declared in kotlin.Boolean' type=kotlin.Boolean origin=EXCLEQ
+                $this: CALL 'public final fun EQEQ (arg0: kotlin.Any?, arg1: kotlin.Any?): kotlin.Boolean declared in kotlin.internal.ir' type=kotlin.Boolean origin=EXCLEQ
+                  arg0: GET_VAR 'val b: @[FlexibleNullability] kotlin.Any? declared in foo.bar.box' type=@[FlexibleNullability] kotlin.Any? origin=null
+                  arg1: CONST Double type=kotlin.Double value=2.0
+          then: BLOCK type=kotlin.Unit origin=null
+            RETURN type=kotlin.Nothing from='public final fun box (): kotlin.String declared in foo.bar'
+              CONST String type=kotlin.String value="Could not invoke functions name(), age(), a(), or b() from Java"
+      RETURN type=kotlin.Nothing from='public final fun box (): kotlin.String declared in foo.bar'
+        CONST String type=kotlin.String value="OK"
diff --git a/compiler-plugin/src/test/resources/testData/box/dataClassInFunctionTest.fir.txt b/compiler-plugin/src/test/resources/testData/box/dataClassInFunctionTest.fir.txt
new file mode 100644
index 00000000..44e5a93f
--- /dev/null
+++ b/compiler-plugin/src/test/resources/testData/box/dataClassInFunctionTest.fir.txt
@@ -0,0 +1,71 @@
+FILE: dataClassInFunctionTest.kt
+    package foo.bar
+
+    public final annotation class Sparkify : R|kotlin/Annotation| {
+        public constructor(): R|foo/bar/Sparkify| {
+            super<R|kotlin/Any|>()
+        }
+
+    }
+    public final annotation class ColumnName : R|kotlin/Annotation| {
+        public constructor(name: R|kotlin/String|): R|foo/bar/ColumnName| {
+            super<R|kotlin/Any|>()
+        }
+
+        public final val name: R|kotlin/String| = R|<local>/name|
+            public get(): R|kotlin/String|
+
+    }
+    public abstract interface Equals : R|kotlin/Any| {
+        public abstract fun canEqual(that: R|kotlin/Any?|): R|kotlin/Boolean|
+
+    }
+    public abstract interface Product : R|foo/bar/Equals| {
+        public abstract fun productElement(n: R|kotlin/Int|): R|kotlin/Any|
+
+        public abstract fun productArity(): R|kotlin/Int|
+
+    }
+    public final fun box(): R|kotlin/String| {
+        @R|foo/bar/Sparkify|() local final data class User : R|kotlin/Any| {
+            public constructor(name: R|kotlin/String| = String(John Doe), age: R|kotlin/Int| = Int(25), @R|foo/bar/ColumnName|(name = String(a)) test: R|kotlin/Double| = Double(1.0), test2: R|kotlin/Double| = Double(2.0)): R|<local>/User| {
+                super<R|kotlin/Any|>()
+            }
+
+            public final val name: R|kotlin/String| = R|<local>/name|
+                public get(): R|kotlin/String|
+
+            public final val age: R|kotlin/Int| = R|<local>/age|
+                public get(): R|kotlin/Int|
+
+            public final val test: R|kotlin/Double| = R|<local>/test|
+                public get(): R|kotlin/Double|
+
+            public final val test2: R|kotlin/Double| = R|<local>/test2|
+                @PROPERTY_GETTER:R|foo/bar/ColumnName|(name = String(b)) public get(): R|kotlin/Double|
+
+            public final operator fun component1(): R|kotlin/String|
+
+            public final operator fun component2(): R|kotlin/Int|
+
+            public final operator fun component3(): R|kotlin/Double|
+
+            public final operator fun component4(): R|kotlin/Double|
+
+            public final fun copy(name: R|kotlin/String| = this@R|<local>/User|.R|<local>/name|, age: R|kotlin/Int| = this@R|<local>/User|.R|<local>/age|, @R|foo/bar/ColumnName|(name = String(a)) test: R|kotlin/Double| = this@R|<local>/User|.R|<local>/test|, test2: R|kotlin/Double| = this@R|<local>/User|.R|<local>/test2|): R|<local>/User|
+
+        }
+
+        lval user: R|<local>/User| = R|<local>/User.User|()
+        lval name: R|kotlin/Any!| = <getClass>(Q|<local>/User|).R|kotlin/jvm/java|<R|<local>/User|>.R|SubstitutionOverride<java/lang/Class.getMethod: R|java/lang/reflect/Method!|>|(String(name)).R|java/lang/reflect/Method.invoke|(R|<local>/user|)
+        lval age: R|kotlin/Any!| = <getClass>(Q|<local>/User|).R|kotlin/jvm/java|<R|<local>/User|>.R|SubstitutionOverride<java/lang/Class.getMethod: R|java/lang/reflect/Method!|>|(String(age)).R|java/lang/reflect/Method.invoke|(R|<local>/user|)
+        lval a: R|kotlin/Any!| = <getClass>(Q|<local>/User|).R|kotlin/jvm/java|<R|<local>/User|>.R|SubstitutionOverride<java/lang/Class.getMethod: R|java/lang/reflect/Method!|>|(String(a)).R|java/lang/reflect/Method.invoke|(R|<local>/user|)
+        lval b: R|kotlin/Any!| = <getClass>(Q|<local>/User|).R|kotlin/jvm/java|<R|<local>/User|>.R|SubstitutionOverride<java/lang/Class.getMethod: R|java/lang/reflect/Method!|>|(String(b)).R|java/lang/reflect/Method.invoke|(R|<local>/user|)
+        when () {
+            !=(R|<local>/name|, String(John Doe)) || !=(R|<local>/age|, Int(25)) || !=(R|<local>/a|, Double(1.0)) || !=(R|<local>/b|, Double(2.0)) ->  {
+                ^box String(Could not invoke functions name(), age(), a(), or b() from Java)
+            }
+        }
+
+        ^box String(OK)
+    }
diff --git a/compiler-plugin/src/test/resources/testData/box/dataClassInFunctionTest.kt b/compiler-plugin/src/test/resources/testData/box/dataClassInFunctionTest.kt
new file mode 100644
index 00000000..bb5bd34d
--- /dev/null
+++ b/compiler-plugin/src/test/resources/testData/box/dataClassInFunctionTest.kt
@@ -0,0 +1,37 @@
+package foo.bar
+
+annotation class Sparkify
+annotation class ColumnName(val name: String)
+
+// Fake Equals
+interface Equals {
+    fun canEqual(that: Any?): Boolean
+}
+
+// Fake Product
+interface Product: Equals {
+    fun productElement(n: Int): Any
+    fun productArity(): Int
+}
+
+fun box(): String {
+
+    @Sparkify
+    data class User(
+        val name: String = "John Doe",
+        val age: Int = 25,
+        @ColumnName("a") val test: Double = 1.0,
+        @get:ColumnName("b") val test2: Double = 2.0,
+    )
+
+    val user = User()
+    val name = User::class.java.getMethod("name").invoke(user)
+    val age = User::class.java.getMethod("age").invoke(user)
+    val a = User::class.java.getMethod("a").invoke(user)
+    val b = User::class.java.getMethod("b").invoke(user)
+
+    if (name != "John Doe" || age != 25 || a != 1.0 || b != 2.0) {
+        return "Could not invoke functions name(), age(), a(), or b() from Java"
+    }
+    return "OK"
+}
diff --git a/compiler-plugin/src/test/resources/testData/box/dataClassIsProductTest.fir.ir.txt b/compiler-plugin/src/test/resources/testData/box/dataClassIsProductTest.fir.ir.txt
new file mode 100644
index 00000000..d204965a
--- /dev/null
+++ b/compiler-plugin/src/test/resources/testData/box/dataClassIsProductTest.fir.ir.txt
@@ -0,0 +1,569 @@
+FILE fqName:foo.bar fileName:/dataClassIsProductTest.kt
+  CLASS ANNOTATION_CLASS name:ColumnName modality:OPEN visibility:public superTypes:[kotlin.Annotation]
+    $this: VALUE_PARAMETER INSTANCE_RECEIVER name:<this> type:foo.bar.ColumnName
+    PROPERTY name:name visibility:public modality:FINAL [val]
+      FIELD PROPERTY_BACKING_FIELD name:name type:kotlin.String visibility:private [final]
+        EXPRESSION_BODY
+          GET_VAR 'name: kotlin.String declared in foo.bar.ColumnName.<init>' type=kotlin.String origin=INITIALIZE_PROPERTY_FROM_PARAMETER
+      FUN DEFAULT_PROPERTY_ACCESSOR name:<get-name> visibility:public modality:FINAL <> ($this:foo.bar.ColumnName) returnType:kotlin.String
+        correspondingProperty: PROPERTY name:name visibility:public modality:FINAL [val]
+        $this: VALUE_PARAMETER name:<this> type:foo.bar.ColumnName
+        BLOCK_BODY
+          RETURN type=kotlin.Nothing from='public final fun <get-name> (): kotlin.String declared in foo.bar.ColumnName'
+            GET_FIELD 'FIELD PROPERTY_BACKING_FIELD name:name type:kotlin.String visibility:private [final]' type=kotlin.String origin=null
+              receiver: GET_VAR '<this>: foo.bar.ColumnName declared in foo.bar.ColumnName.<get-name>' type=foo.bar.ColumnName origin=null
+    CONSTRUCTOR visibility:public <> (name:kotlin.String) returnType:foo.bar.ColumnName [primary]
+      VALUE_PARAMETER name:name index:0 type:kotlin.String
+      BLOCK_BODY
+        DELEGATING_CONSTRUCTOR_CALL 'public constructor <init> () declared in kotlin.Any'
+        INSTANCE_INITIALIZER_CALL classDescriptor='CLASS ANNOTATION_CLASS name:ColumnName modality:OPEN visibility:public superTypes:[kotlin.Annotation]'
+    FUN FAKE_OVERRIDE name:equals visibility:public modality:OPEN <> ($this:kotlin.Any, other:kotlin.Any?) returnType:kotlin.Boolean [fake_override,operator]
+      overridden:
+        public open fun equals (other: kotlin.Any?): kotlin.Boolean declared in kotlin.Annotation
+      $this: VALUE_PARAMETER name:<this> type:kotlin.Any
+      VALUE_PARAMETER name:other index:0 type:kotlin.Any?
+    FUN FAKE_OVERRIDE name:hashCode visibility:public modality:OPEN <> ($this:kotlin.Any) returnType:kotlin.Int [fake_override]
+      overridden:
+        public open fun hashCode (): kotlin.Int declared in kotlin.Annotation
+      $this: VALUE_PARAMETER name:<this> type:kotlin.Any
+    FUN FAKE_OVERRIDE name:toString visibility:public modality:OPEN <> ($this:kotlin.Any) returnType:kotlin.String [fake_override]
+      overridden:
+        public open fun toString (): kotlin.String declared in kotlin.Annotation
+      $this: VALUE_PARAMETER name:<this> type:kotlin.Any
+  CLASS ANNOTATION_CLASS name:Sparkify modality:OPEN visibility:public superTypes:[kotlin.Annotation]
+    $this: VALUE_PARAMETER INSTANCE_RECEIVER name:<this> type:foo.bar.Sparkify
+    CONSTRUCTOR visibility:public <> () returnType:foo.bar.Sparkify [primary]
+      BLOCK_BODY
+        DELEGATING_CONSTRUCTOR_CALL 'public constructor <init> () declared in kotlin.Any'
+        INSTANCE_INITIALIZER_CALL classDescriptor='CLASS ANNOTATION_CLASS name:Sparkify modality:OPEN visibility:public superTypes:[kotlin.Annotation]'
+    FUN FAKE_OVERRIDE name:equals visibility:public modality:OPEN <> ($this:kotlin.Any, other:kotlin.Any?) returnType:kotlin.Boolean [fake_override,operator]
+      overridden:
+        public open fun equals (other: kotlin.Any?): kotlin.Boolean declared in kotlin.Annotation
+      $this: VALUE_PARAMETER name:<this> type:kotlin.Any
+      VALUE_PARAMETER name:other index:0 type:kotlin.Any?
+    FUN FAKE_OVERRIDE name:hashCode visibility:public modality:OPEN <> ($this:kotlin.Any) returnType:kotlin.Int [fake_override]
+      overridden:
+        public open fun hashCode (): kotlin.Int declared in kotlin.Annotation
+      $this: VALUE_PARAMETER name:<this> type:kotlin.Any
+    FUN FAKE_OVERRIDE name:toString visibility:public modality:OPEN <> ($this:kotlin.Any) returnType:kotlin.String [fake_override]
+      overridden:
+        public open fun toString (): kotlin.String declared in kotlin.Annotation
+      $this: VALUE_PARAMETER name:<this> type:kotlin.Any
+  CLASS CLASS name:User modality:FINAL visibility:public [data] superTypes:[kotlin.Any; foo.bar.Product; java.io.Serializable]
+    annotations:
+      Sparkify
+    $this: VALUE_PARAMETER INSTANCE_RECEIVER name:<this> type:foo.bar.User
+    PROPERTY name:name visibility:public modality:FINAL [val]
+      FIELD PROPERTY_BACKING_FIELD name:name type:kotlin.String visibility:private [final]
+        EXPRESSION_BODY
+          GET_VAR 'name: kotlin.String declared in foo.bar.User.<init>' type=kotlin.String origin=INITIALIZE_PROPERTY_FROM_PARAMETER
+      FUN DEFAULT_PROPERTY_ACCESSOR name:<get-name> visibility:public modality:FINAL <> ($this:foo.bar.User) returnType:kotlin.String
+        annotations:
+          JvmName(name = "name")
+        correspondingProperty: PROPERTY name:name visibility:public modality:FINAL [val]
+        $this: VALUE_PARAMETER name:<this> type:foo.bar.User
+        BLOCK_BODY
+          RETURN type=kotlin.Nothing from='public final fun <get-name> (): kotlin.String declared in foo.bar.User'
+            GET_FIELD 'FIELD PROPERTY_BACKING_FIELD name:name type:kotlin.String visibility:private [final]' type=kotlin.String origin=null
+              receiver: GET_VAR '<this>: foo.bar.User declared in foo.bar.User.<get-name>' type=foo.bar.User origin=null
+    PROPERTY name:age visibility:public modality:FINAL [val]
+      FIELD PROPERTY_BACKING_FIELD name:age type:kotlin.Int visibility:private [final]
+        EXPRESSION_BODY
+          GET_VAR 'age: kotlin.Int declared in foo.bar.User.<init>' type=kotlin.Int origin=INITIALIZE_PROPERTY_FROM_PARAMETER
+      FUN DEFAULT_PROPERTY_ACCESSOR name:<get-age> visibility:public modality:FINAL <> ($this:foo.bar.User) returnType:kotlin.Int
+        annotations:
+          JvmName(name = "age")
+        correspondingProperty: PROPERTY name:age visibility:public modality:FINAL [val]
+        $this: VALUE_PARAMETER name:<this> type:foo.bar.User
+        BLOCK_BODY
+          RETURN type=kotlin.Nothing from='public final fun <get-age> (): kotlin.Int declared in foo.bar.User'
+            GET_FIELD 'FIELD PROPERTY_BACKING_FIELD name:age type:kotlin.Int visibility:private [final]' type=kotlin.Int origin=null
+              receiver: GET_VAR '<this>: foo.bar.User declared in foo.bar.User.<get-age>' type=foo.bar.User origin=null
+    PROPERTY name:test visibility:public modality:FINAL [val]
+      FIELD PROPERTY_BACKING_FIELD name:test type:kotlin.Double visibility:private [final]
+        EXPRESSION_BODY
+          GET_VAR 'test: kotlin.Double declared in foo.bar.User.<init>' type=kotlin.Double origin=INITIALIZE_PROPERTY_FROM_PARAMETER
+      FUN DEFAULT_PROPERTY_ACCESSOR name:<get-test> visibility:public modality:FINAL <> ($this:foo.bar.User) returnType:kotlin.Double
+        annotations:
+          JvmName(name = "a")
+        correspondingProperty: PROPERTY name:test visibility:public modality:FINAL [val]
+        $this: VALUE_PARAMETER name:<this> type:foo.bar.User
+        BLOCK_BODY
+          RETURN type=kotlin.Nothing from='public final fun <get-test> (): kotlin.Double declared in foo.bar.User'
+            GET_FIELD 'FIELD PROPERTY_BACKING_FIELD name:test type:kotlin.Double visibility:private [final]' type=kotlin.Double origin=null
+              receiver: GET_VAR '<this>: foo.bar.User declared in foo.bar.User.<get-test>' type=foo.bar.User origin=null
+    PROPERTY name:test2 visibility:public modality:FINAL [val]
+      FIELD PROPERTY_BACKING_FIELD name:test2 type:kotlin.Double visibility:private [final]
+        EXPRESSION_BODY
+          GET_VAR 'test2: kotlin.Double declared in foo.bar.User.<init>' type=kotlin.Double origin=INITIALIZE_PROPERTY_FROM_PARAMETER
+      FUN DEFAULT_PROPERTY_ACCESSOR name:<get-test2> visibility:public modality:FINAL <> ($this:foo.bar.User) returnType:kotlin.Double
+        annotations:
+          ColumnName(name = "b")
+          JvmName(name = "b")
+        correspondingProperty: PROPERTY name:test2 visibility:public modality:FINAL [val]
+        $this: VALUE_PARAMETER name:<this> type:foo.bar.User
+        BLOCK_BODY
+          RETURN type=kotlin.Nothing from='public final fun <get-test2> (): kotlin.Double declared in foo.bar.User'
+            GET_FIELD 'FIELD PROPERTY_BACKING_FIELD name:test2 type:kotlin.Double visibility:private [final]' type=kotlin.Double origin=null
+              receiver: GET_VAR '<this>: foo.bar.User declared in foo.bar.User.<get-test2>' type=foo.bar.User origin=null
+    CONSTRUCTOR visibility:public <> (name:kotlin.String, age:kotlin.Int, test:kotlin.Double, test2:kotlin.Double) returnType:foo.bar.User [primary]
+      VALUE_PARAMETER name:name index:0 type:kotlin.String
+        EXPRESSION_BODY
+          CONST String type=kotlin.String value="John Doe"
+      VALUE_PARAMETER name:age index:1 type:kotlin.Int
+        EXPRESSION_BODY
+          CONST Int type=kotlin.Int value=25
+      VALUE_PARAMETER name:test index:2 type:kotlin.Double
+        annotations:
+          ColumnName(name = "a")
+        EXPRESSION_BODY
+          CONST Double type=kotlin.Double value=1.0
+      VALUE_PARAMETER name:test2 index:3 type:kotlin.Double
+        EXPRESSION_BODY
+          CONST Double type=kotlin.Double value=2.0
+      BLOCK_BODY
+        DELEGATING_CONSTRUCTOR_CALL 'public constructor <init> () declared in kotlin.Any'
+        INSTANCE_INITIALIZER_CALL classDescriptor='CLASS CLASS name:User modality:FINAL visibility:public [data] superTypes:[kotlin.Any; foo.bar.Product; java.io.Serializable]'
+    FUN GENERATED_DATA_CLASS_MEMBER name:component1 visibility:public modality:FINAL <> ($this:foo.bar.User) returnType:kotlin.String [operator]
+      $this: VALUE_PARAMETER name:<this> type:foo.bar.User
+      BLOCK_BODY
+        RETURN type=kotlin.Nothing from='public final fun component1 (): kotlin.String declared in foo.bar.User'
+          GET_FIELD 'FIELD PROPERTY_BACKING_FIELD name:name type:kotlin.String visibility:private [final]' type=kotlin.String origin=null
+            receiver: GET_VAR '<this>: foo.bar.User declared in foo.bar.User.component1' type=foo.bar.User origin=null
+    FUN GENERATED_DATA_CLASS_MEMBER name:component2 visibility:public modality:FINAL <> ($this:foo.bar.User) returnType:kotlin.Int [operator]
+      $this: VALUE_PARAMETER name:<this> type:foo.bar.User
+      BLOCK_BODY
+        RETURN type=kotlin.Nothing from='public final fun component2 (): kotlin.Int declared in foo.bar.User'
+          GET_FIELD 'FIELD PROPERTY_BACKING_FIELD name:age type:kotlin.Int visibility:private [final]' type=kotlin.Int origin=null
+            receiver: GET_VAR '<this>: foo.bar.User declared in foo.bar.User.component2' type=foo.bar.User origin=null
+    FUN GENERATED_DATA_CLASS_MEMBER name:component3 visibility:public modality:FINAL <> ($this:foo.bar.User) returnType:kotlin.Double [operator]
+      $this: VALUE_PARAMETER name:<this> type:foo.bar.User
+      BLOCK_BODY
+        RETURN type=kotlin.Nothing from='public final fun component3 (): kotlin.Double declared in foo.bar.User'
+          GET_FIELD 'FIELD PROPERTY_BACKING_FIELD name:test type:kotlin.Double visibility:private [final]' type=kotlin.Double origin=null
+            receiver: GET_VAR '<this>: foo.bar.User declared in foo.bar.User.component3' type=foo.bar.User origin=null
+    FUN GENERATED_DATA_CLASS_MEMBER name:component4 visibility:public modality:FINAL <> ($this:foo.bar.User) returnType:kotlin.Double [operator]
+      $this: VALUE_PARAMETER name:<this> type:foo.bar.User
+      BLOCK_BODY
+        RETURN type=kotlin.Nothing from='public final fun component4 (): kotlin.Double declared in foo.bar.User'
+          GET_FIELD 'FIELD PROPERTY_BACKING_FIELD name:test2 type:kotlin.Double visibility:private [final]' type=kotlin.Double origin=null
+            receiver: GET_VAR '<this>: foo.bar.User declared in foo.bar.User.component4' type=foo.bar.User origin=null
+    FUN GENERATED_DATA_CLASS_MEMBER name:copy visibility:public modality:FINAL <> ($this:foo.bar.User, name:kotlin.String, age:kotlin.Int, test:kotlin.Double, test2:kotlin.Double) returnType:foo.bar.User
+      $this: VALUE_PARAMETER name:<this> type:foo.bar.User
+      VALUE_PARAMETER name:name index:0 type:kotlin.String
+        EXPRESSION_BODY
+          GET_FIELD 'FIELD PROPERTY_BACKING_FIELD name:name type:kotlin.String visibility:private [final]' type=kotlin.String origin=null
+            receiver: GET_VAR '<this>: foo.bar.User declared in foo.bar.User.copy' type=foo.bar.User origin=null
+      VALUE_PARAMETER name:age index:1 type:kotlin.Int
+        EXPRESSION_BODY
+          GET_FIELD 'FIELD PROPERTY_BACKING_FIELD name:age type:kotlin.Int visibility:private [final]' type=kotlin.Int origin=null
+            receiver: GET_VAR '<this>: foo.bar.User declared in foo.bar.User.copy' type=foo.bar.User origin=null
+      VALUE_PARAMETER name:test index:2 type:kotlin.Double
+        annotations:
+          ColumnName(name = "a")
+        EXPRESSION_BODY
+          GET_FIELD 'FIELD PROPERTY_BACKING_FIELD name:test type:kotlin.Double visibility:private [final]' type=kotlin.Double origin=null
+            receiver: GET_VAR '<this>: foo.bar.User declared in foo.bar.User.copy' type=foo.bar.User origin=null
+      VALUE_PARAMETER name:test2 index:3 type:kotlin.Double
+        EXPRESSION_BODY
+          GET_FIELD 'FIELD PROPERTY_BACKING_FIELD name:test2 type:kotlin.Double visibility:private [final]' type=kotlin.Double origin=null
+            receiver: GET_VAR '<this>: foo.bar.User declared in foo.bar.User.copy' type=foo.bar.User origin=null
+      BLOCK_BODY
+        RETURN type=kotlin.Nothing from='public final fun copy (name: kotlin.String, age: kotlin.Int, test: kotlin.Double, test2: kotlin.Double): foo.bar.User declared in foo.bar.User'
+          CONSTRUCTOR_CALL 'public constructor <init> (name: kotlin.String, age: kotlin.Int, test: kotlin.Double, test2: kotlin.Double) declared in foo.bar.User' type=foo.bar.User origin=null
+            name: GET_VAR 'name: kotlin.String declared in foo.bar.User.copy' type=kotlin.String origin=null
+            age: GET_VAR 'age: kotlin.Int declared in foo.bar.User.copy' type=kotlin.Int origin=null
+            test: GET_VAR 'test: kotlin.Double declared in foo.bar.User.copy' type=kotlin.Double origin=null
+            test2: GET_VAR 'test2: kotlin.Double declared in foo.bar.User.copy' type=kotlin.Double origin=null
+    FUN GENERATED_DATA_CLASS_MEMBER name:equals visibility:public modality:OPEN <> ($this:foo.bar.User, other:kotlin.Any?) returnType:kotlin.Boolean [operator]
+      overridden:
+        public open fun equals (other: kotlin.Any?): kotlin.Boolean declared in kotlin.Any
+      $this: VALUE_PARAMETER name:<this> type:foo.bar.User
+      VALUE_PARAMETER name:other index:0 type:kotlin.Any?
+      BLOCK_BODY
+        WHEN type=kotlin.Unit origin=null
+          BRANCH
+            if: CALL 'public final fun EQEQEQ (arg0: kotlin.Any?, arg1: kotlin.Any?): kotlin.Boolean declared in kotlin.internal.ir' type=kotlin.Boolean origin=EQEQEQ
+              arg0: GET_VAR '<this>: foo.bar.User declared in foo.bar.User.equals' type=foo.bar.User origin=null
+              arg1: GET_VAR 'other: kotlin.Any? declared in foo.bar.User.equals' type=kotlin.Any? origin=null
+            then: RETURN type=kotlin.Nothing from='public open fun equals (other: kotlin.Any?): kotlin.Boolean declared in foo.bar.User'
+              CONST Boolean type=kotlin.Boolean value=true
+        WHEN type=kotlin.Unit origin=null
+          BRANCH
+            if: TYPE_OP type=kotlin.Boolean origin=NOT_INSTANCEOF typeOperand=foo.bar.User
+              GET_VAR 'other: kotlin.Any? declared in foo.bar.User.equals' type=kotlin.Any? origin=null
+            then: RETURN type=kotlin.Nothing from='public open fun equals (other: kotlin.Any?): kotlin.Boolean declared in foo.bar.User'
+              CONST Boolean type=kotlin.Boolean value=false
+        VAR IR_TEMPORARY_VARIABLE name:tmp_0 type:foo.bar.User [val]
+          TYPE_OP type=foo.bar.User origin=CAST typeOperand=foo.bar.User
+            GET_VAR 'other: kotlin.Any? declared in foo.bar.User.equals' type=kotlin.Any? origin=null
+        WHEN type=kotlin.Unit origin=null
+          BRANCH
+            if: CALL 'public final fun not (): kotlin.Boolean declared in kotlin.Boolean' type=kotlin.Boolean origin=EXCLEQ
+              $this: CALL 'public final fun EQEQ (arg0: kotlin.Any?, arg1: kotlin.Any?): kotlin.Boolean declared in kotlin.internal.ir' type=kotlin.Boolean origin=EXCLEQ
+                arg0: GET_FIELD 'FIELD PROPERTY_BACKING_FIELD name:name type:kotlin.String visibility:private [final]' type=kotlin.String origin=null
+                  receiver: GET_VAR '<this>: foo.bar.User declared in foo.bar.User.equals' type=foo.bar.User origin=null
+                arg1: GET_FIELD 'FIELD PROPERTY_BACKING_FIELD name:name type:kotlin.String visibility:private [final]' type=kotlin.String origin=null
+                  receiver: GET_VAR 'val tmp_0: foo.bar.User declared in foo.bar.User.equals' type=foo.bar.User origin=null
+            then: RETURN type=kotlin.Nothing from='public open fun equals (other: kotlin.Any?): kotlin.Boolean declared in foo.bar.User'
+              CONST Boolean type=kotlin.Boolean value=false
+        WHEN type=kotlin.Unit origin=null
+          BRANCH
+            if: CALL 'public final fun not (): kotlin.Boolean declared in kotlin.Boolean' type=kotlin.Boolean origin=EXCLEQ
+              $this: CALL 'public final fun EQEQ (arg0: kotlin.Any?, arg1: kotlin.Any?): kotlin.Boolean declared in kotlin.internal.ir' type=kotlin.Boolean origin=EXCLEQ
+                arg0: GET_FIELD 'FIELD PROPERTY_BACKING_FIELD name:age type:kotlin.Int visibility:private [final]' type=kotlin.Int origin=null
+                  receiver: GET_VAR '<this>: foo.bar.User declared in foo.bar.User.equals' type=foo.bar.User origin=null
+                arg1: GET_FIELD 'FIELD PROPERTY_BACKING_FIELD name:age type:kotlin.Int visibility:private [final]' type=kotlin.Int origin=null
+                  receiver: GET_VAR 'val tmp_0: foo.bar.User declared in foo.bar.User.equals' type=foo.bar.User origin=null
+            then: RETURN type=kotlin.Nothing from='public open fun equals (other: kotlin.Any?): kotlin.Boolean declared in foo.bar.User'
+              CONST Boolean type=kotlin.Boolean value=false
+        WHEN type=kotlin.Unit origin=null
+          BRANCH
+            if: CALL 'public final fun not (): kotlin.Boolean declared in kotlin.Boolean' type=kotlin.Boolean origin=EXCLEQ
+              $this: CALL 'public final fun EQEQ (arg0: kotlin.Any?, arg1: kotlin.Any?): kotlin.Boolean declared in kotlin.internal.ir' type=kotlin.Boolean origin=EXCLEQ
+                arg0: GET_FIELD 'FIELD PROPERTY_BACKING_FIELD name:test type:kotlin.Double visibility:private [final]' type=kotlin.Double origin=null
+                  receiver: GET_VAR '<this>: foo.bar.User declared in foo.bar.User.equals' type=foo.bar.User origin=null
+                arg1: GET_FIELD 'FIELD PROPERTY_BACKING_FIELD name:test type:kotlin.Double visibility:private [final]' type=kotlin.Double origin=null
+                  receiver: GET_VAR 'val tmp_0: foo.bar.User declared in foo.bar.User.equals' type=foo.bar.User origin=null
+            then: RETURN type=kotlin.Nothing from='public open fun equals (other: kotlin.Any?): kotlin.Boolean declared in foo.bar.User'
+              CONST Boolean type=kotlin.Boolean value=false
+        WHEN type=kotlin.Unit origin=null
+          BRANCH
+            if: CALL 'public final fun not (): kotlin.Boolean declared in kotlin.Boolean' type=kotlin.Boolean origin=EXCLEQ
+              $this: CALL 'public final fun EQEQ (arg0: kotlin.Any?, arg1: kotlin.Any?): kotlin.Boolean declared in kotlin.internal.ir' type=kotlin.Boolean origin=EXCLEQ
+                arg0: GET_FIELD 'FIELD PROPERTY_BACKING_FIELD name:test2 type:kotlin.Double visibility:private [final]' type=kotlin.Double origin=null
+                  receiver: GET_VAR '<this>: foo.bar.User declared in foo.bar.User.equals' type=foo.bar.User origin=null
+                arg1: GET_FIELD 'FIELD PROPERTY_BACKING_FIELD name:test2 type:kotlin.Double visibility:private [final]' type=kotlin.Double origin=null
+                  receiver: GET_VAR 'val tmp_0: foo.bar.User declared in foo.bar.User.equals' type=foo.bar.User origin=null
+            then: RETURN type=kotlin.Nothing from='public open fun equals (other: kotlin.Any?): kotlin.Boolean declared in foo.bar.User'
+              CONST Boolean type=kotlin.Boolean value=false
+        RETURN type=kotlin.Nothing from='public open fun equals (other: kotlin.Any?): kotlin.Boolean declared in foo.bar.User'
+          CONST Boolean type=kotlin.Boolean value=true
+    FUN GENERATED_DATA_CLASS_MEMBER name:hashCode visibility:public modality:OPEN <> ($this:foo.bar.User) returnType:kotlin.Int
+      overridden:
+        public open fun hashCode (): kotlin.Int declared in kotlin.Any
+      $this: VALUE_PARAMETER name:<this> type:foo.bar.User
+      BLOCK_BODY
+        VAR name:result type:kotlin.Int [var]
+          CALL 'public open fun hashCode (): kotlin.Int declared in kotlin.String' type=kotlin.Int origin=null
+            $this: GET_FIELD 'FIELD PROPERTY_BACKING_FIELD name:name type:kotlin.String visibility:private [final]' type=kotlin.String origin=null
+              receiver: GET_VAR '<this>: foo.bar.User declared in foo.bar.User.hashCode' type=foo.bar.User origin=null
+        SET_VAR 'var result: kotlin.Int declared in foo.bar.User.hashCode' type=kotlin.Unit origin=EQ
+          CALL 'public final fun plus (other: kotlin.Int): kotlin.Int declared in kotlin.Int' type=kotlin.Int origin=null
+            $this: CALL 'public final fun times (other: kotlin.Int): kotlin.Int declared in kotlin.Int' type=kotlin.Int origin=null
+              $this: GET_VAR 'var result: kotlin.Int declared in foo.bar.User.hashCode' type=kotlin.Int origin=null
+              other: CONST Int type=kotlin.Int value=31
+            other: CALL 'public open fun hashCode (): kotlin.Int declared in kotlin.Int' type=kotlin.Int origin=null
+              $this: GET_FIELD 'FIELD PROPERTY_BACKING_FIELD name:age type:kotlin.Int visibility:private [final]' type=kotlin.Int origin=null
+                receiver: GET_VAR '<this>: foo.bar.User declared in foo.bar.User.hashCode' type=foo.bar.User origin=null
+        SET_VAR 'var result: kotlin.Int declared in foo.bar.User.hashCode' type=kotlin.Unit origin=EQ
+          CALL 'public final fun plus (other: kotlin.Int): kotlin.Int declared in kotlin.Int' type=kotlin.Int origin=null
+            $this: CALL 'public final fun times (other: kotlin.Int): kotlin.Int declared in kotlin.Int' type=kotlin.Int origin=null
+              $this: GET_VAR 'var result: kotlin.Int declared in foo.bar.User.hashCode' type=kotlin.Int origin=null
+              other: CONST Int type=kotlin.Int value=31
+            other: CALL 'public open fun hashCode (): kotlin.Int declared in kotlin.Double' type=kotlin.Int origin=null
+              $this: GET_FIELD 'FIELD PROPERTY_BACKING_FIELD name:test type:kotlin.Double visibility:private [final]' type=kotlin.Double origin=null
+                receiver: GET_VAR '<this>: foo.bar.User declared in foo.bar.User.hashCode' type=foo.bar.User origin=null
+        SET_VAR 'var result: kotlin.Int declared in foo.bar.User.hashCode' type=kotlin.Unit origin=EQ
+          CALL 'public final fun plus (other: kotlin.Int): kotlin.Int declared in kotlin.Int' type=kotlin.Int origin=null
+            $this: CALL 'public final fun times (other: kotlin.Int): kotlin.Int declared in kotlin.Int' type=kotlin.Int origin=null
+              $this: GET_VAR 'var result: kotlin.Int declared in foo.bar.User.hashCode' type=kotlin.Int origin=null
+              other: CONST Int type=kotlin.Int value=31
+            other: CALL 'public open fun hashCode (): kotlin.Int declared in kotlin.Double' type=kotlin.Int origin=null
+              $this: GET_FIELD 'FIELD PROPERTY_BACKING_FIELD name:test2 type:kotlin.Double visibility:private [final]' type=kotlin.Double origin=null
+                receiver: GET_VAR '<this>: foo.bar.User declared in foo.bar.User.hashCode' type=foo.bar.User origin=null
+        RETURN type=kotlin.Nothing from='public open fun hashCode (): kotlin.Int declared in foo.bar.User'
+          GET_VAR 'var result: kotlin.Int declared in foo.bar.User.hashCode' type=kotlin.Int origin=null
+    FUN GENERATED_DATA_CLASS_MEMBER name:toString visibility:public modality:OPEN <> ($this:foo.bar.User) returnType:kotlin.String
+      overridden:
+        public open fun toString (): kotlin.String declared in kotlin.Any
+      $this: VALUE_PARAMETER name:<this> type:foo.bar.User
+      BLOCK_BODY
+        RETURN type=kotlin.Nothing from='public open fun toString (): kotlin.String declared in foo.bar.User'
+          STRING_CONCATENATION type=kotlin.String
+            CONST String type=kotlin.String value="User("
+            CONST String type=kotlin.String value="name="
+            GET_FIELD 'FIELD PROPERTY_BACKING_FIELD name:name type:kotlin.String visibility:private [final]' type=kotlin.String origin=null
+              receiver: GET_VAR '<this>: foo.bar.User declared in foo.bar.User.toString' type=foo.bar.User origin=null
+            CONST String type=kotlin.String value=", "
+            CONST String type=kotlin.String value="age="
+            GET_FIELD 'FIELD PROPERTY_BACKING_FIELD name:age type:kotlin.Int visibility:private [final]' type=kotlin.Int origin=null
+              receiver: GET_VAR '<this>: foo.bar.User declared in foo.bar.User.toString' type=foo.bar.User origin=null
+            CONST String type=kotlin.String value=", "
+            CONST String type=kotlin.String value="test="
+            GET_FIELD 'FIELD PROPERTY_BACKING_FIELD name:test type:kotlin.Double visibility:private [final]' type=kotlin.Double origin=null
+              receiver: GET_VAR '<this>: foo.bar.User declared in foo.bar.User.toString' type=foo.bar.User origin=null
+            CONST String type=kotlin.String value=", "
+            CONST String type=kotlin.String value="test2="
+            GET_FIELD 'FIELD PROPERTY_BACKING_FIELD name:test2 type:kotlin.Double visibility:private [final]' type=kotlin.Double origin=null
+              receiver: GET_VAR '<this>: foo.bar.User declared in foo.bar.User.toString' type=foo.bar.User origin=null
+            CONST String type=kotlin.String value=")"
+    FUN name:canEqual visibility:public modality:OPEN <> ($this:foo.bar.User, that:kotlin.Any?) returnType:kotlin.Boolean
+      overridden:
+        public abstract fun canEqual (that: kotlin.Any?): kotlin.Boolean declared in foo.bar.Equals
+      $this: VALUE_PARAMETER name:$this type:foo.bar.User
+      VALUE_PARAMETER name:that index:0 type:kotlin.Any?
+      BLOCK_BODY
+        RETURN type=kotlin.Nothing from='public open fun canEqual (that: kotlin.Any?): kotlin.Boolean declared in foo.bar.User'
+          TYPE_OP type=kotlin.Boolean origin=INSTANCEOF typeOperand=foo.bar.User
+            GET_VAR 'that: kotlin.Any? declared in foo.bar.User.canEqual' type=kotlin.Any? origin=null
+    FUN name:productArity visibility:public modality:OPEN <> ($this:foo.bar.User) returnType:kotlin.Int
+      overridden:
+        public abstract fun productArity (): kotlin.Int declared in foo.bar.Product
+      $this: VALUE_PARAMETER name:$this type:foo.bar.User
+      BLOCK_BODY
+        RETURN type=kotlin.Nothing from='public open fun productArity (): kotlin.Int declared in foo.bar.User'
+          CONST Int type=kotlin.Int value=4
+    FUN name:productElement visibility:public modality:OPEN <> ($this:foo.bar.User, n:kotlin.Int) returnType:kotlin.Any?
+      overridden:
+        public abstract fun productElement (n: kotlin.Int): kotlin.Any declared in foo.bar.Product
+      $this: VALUE_PARAMETER name:$this type:foo.bar.User
+      VALUE_PARAMETER name:n index:0 type:kotlin.Int
+      BLOCK_BODY
+        RETURN type=kotlin.Nothing from='public open fun productElement (n: kotlin.Int): kotlin.Any? declared in foo.bar.User'
+          WHEN type=kotlin.Any? origin=IF
+            BRANCH
+              if: CALL 'public final fun EQEQ (arg0: kotlin.Any?, arg1: kotlin.Any?): kotlin.Boolean declared in kotlin.internal.ir' type=kotlin.Boolean origin=EQEQ
+                arg0: GET_VAR 'n: kotlin.Int declared in foo.bar.User.productElement' type=kotlin.Int origin=null
+                arg1: CONST Int type=kotlin.Int value=0
+              then: CALL 'public final fun <get-name> (): kotlin.String declared in foo.bar.User' type=kotlin.String origin=GET_PROPERTY
+                $this: GET_VAR '$this: foo.bar.User declared in foo.bar.User.productElement' type=foo.bar.User origin=null
+            BRANCH
+              if: CALL 'public final fun EQEQ (arg0: kotlin.Any?, arg1: kotlin.Any?): kotlin.Boolean declared in kotlin.internal.ir' type=kotlin.Boolean origin=EQEQ
+                arg0: GET_VAR 'n: kotlin.Int declared in foo.bar.User.productElement' type=kotlin.Int origin=null
+                arg1: CONST Int type=kotlin.Int value=1
+              then: CALL 'public final fun <get-age> (): kotlin.Int declared in foo.bar.User' type=kotlin.Int origin=GET_PROPERTY
+                $this: GET_VAR '$this: foo.bar.User declared in foo.bar.User.productElement' type=foo.bar.User origin=null
+            BRANCH
+              if: CALL 'public final fun EQEQ (arg0: kotlin.Any?, arg1: kotlin.Any?): kotlin.Boolean declared in kotlin.internal.ir' type=kotlin.Boolean origin=EQEQ
+                arg0: GET_VAR 'n: kotlin.Int declared in foo.bar.User.productElement' type=kotlin.Int origin=null
+                arg1: CONST Int type=kotlin.Int value=2
+              then: CALL 'public final fun <get-test> (): kotlin.Double declared in foo.bar.User' type=kotlin.Double origin=GET_PROPERTY
+                $this: GET_VAR '$this: foo.bar.User declared in foo.bar.User.productElement' type=foo.bar.User origin=null
+            BRANCH
+              if: CALL 'public final fun EQEQ (arg0: kotlin.Any?, arg1: kotlin.Any?): kotlin.Boolean declared in kotlin.internal.ir' type=kotlin.Boolean origin=EQEQ
+                arg0: GET_VAR 'n: kotlin.Int declared in foo.bar.User.productElement' type=kotlin.Int origin=null
+                arg1: CONST Int type=kotlin.Int value=3
+              then: CALL 'public final fun <get-test2> (): kotlin.Double declared in foo.bar.User' type=kotlin.Double origin=GET_PROPERTY
+                $this: GET_VAR '$this: foo.bar.User declared in foo.bar.User.productElement' type=foo.bar.User origin=null
+            BRANCH
+              if: CONST Boolean type=kotlin.Boolean value=true
+              then: THROW type=kotlin.Nothing
+                CONSTRUCTOR_CALL 'public constructor <init> () declared in java.lang.IndexOutOfBoundsException' type=java.lang.IndexOutOfBoundsException origin=null
+  CLASS INTERFACE name:Equals modality:ABSTRACT visibility:public superTypes:[kotlin.Any]
+    $this: VALUE_PARAMETER INSTANCE_RECEIVER name:<this> type:foo.bar.Equals
+    FUN FAKE_OVERRIDE name:equals visibility:public modality:OPEN <> ($this:kotlin.Any, other:kotlin.Any?) returnType:kotlin.Boolean [fake_override,operator]
+      overridden:
+        public open fun equals (other: kotlin.Any?): kotlin.Boolean declared in kotlin.Any
+      $this: VALUE_PARAMETER name:<this> type:kotlin.Any
+      VALUE_PARAMETER name:other index:0 type:kotlin.Any?
+    FUN FAKE_OVERRIDE name:hashCode visibility:public modality:OPEN <> ($this:kotlin.Any) returnType:kotlin.Int [fake_override]
+      overridden:
+        public open fun hashCode (): kotlin.Int declared in kotlin.Any
+      $this: VALUE_PARAMETER name:<this> type:kotlin.Any
+    FUN FAKE_OVERRIDE name:toString visibility:public modality:OPEN <> ($this:kotlin.Any) returnType:kotlin.String [fake_override]
+      overridden:
+        public open fun toString (): kotlin.String declared in kotlin.Any
+      $this: VALUE_PARAMETER name:<this> type:kotlin.Any
+    FUN name:canEqual visibility:public modality:ABSTRACT <> ($this:foo.bar.Equals, that:kotlin.Any?) returnType:kotlin.Boolean
+      $this: VALUE_PARAMETER name:<this> type:foo.bar.Equals
+      VALUE_PARAMETER name:that index:0 type:kotlin.Any?
+  CLASS INTERFACE name:Product modality:ABSTRACT visibility:public superTypes:[foo.bar.Equals]
+    $this: VALUE_PARAMETER INSTANCE_RECEIVER name:<this> type:foo.bar.Product
+    FUN FAKE_OVERRIDE name:canEqual visibility:public modality:ABSTRACT <> ($this:foo.bar.Equals, that:kotlin.Any?) returnType:kotlin.Boolean [fake_override]
+      overridden:
+        public abstract fun canEqual (that: kotlin.Any?): kotlin.Boolean declared in foo.bar.Equals
+      $this: VALUE_PARAMETER name:<this> type:foo.bar.Equals
+      VALUE_PARAMETER name:that index:0 type:kotlin.Any?
+    FUN FAKE_OVERRIDE name:equals visibility:public modality:OPEN <> ($this:kotlin.Any, other:kotlin.Any?) returnType:kotlin.Boolean [fake_override,operator]
+      overridden:
+        public open fun equals (other: kotlin.Any?): kotlin.Boolean declared in foo.bar.Equals
+      $this: VALUE_PARAMETER name:<this> type:kotlin.Any
+      VALUE_PARAMETER name:other index:0 type:kotlin.Any?
+    FUN FAKE_OVERRIDE name:hashCode visibility:public modality:OPEN <> ($this:kotlin.Any) returnType:kotlin.Int [fake_override]
+      overridden:
+        public open fun hashCode (): kotlin.Int declared in foo.bar.Equals
+      $this: VALUE_PARAMETER name:<this> type:kotlin.Any
+    FUN FAKE_OVERRIDE name:toString visibility:public modality:OPEN <> ($this:kotlin.Any) returnType:kotlin.String [fake_override]
+      overridden:
+        public open fun toString (): kotlin.String declared in foo.bar.Equals
+      $this: VALUE_PARAMETER name:<this> type:kotlin.Any
+    FUN name:productArity visibility:public modality:ABSTRACT <> ($this:foo.bar.Product) returnType:kotlin.Int
+      $this: VALUE_PARAMETER name:<this> type:foo.bar.Product
+    FUN name:productElement visibility:public modality:ABSTRACT <> ($this:foo.bar.Product, n:kotlin.Int) returnType:kotlin.Any
+      $this: VALUE_PARAMETER name:<this> type:foo.bar.Product
+      VALUE_PARAMETER name:n index:0 type:kotlin.Int
+  FUN name:box visibility:public modality:FINAL <> () returnType:kotlin.String
+    BLOCK_BODY
+      VAR name:user type:foo.bar.User [val]
+        CONSTRUCTOR_CALL 'public constructor <init> (name: kotlin.String, age: kotlin.Int, test: kotlin.Double, test2: kotlin.Double) declared in foo.bar.User' type=foo.bar.User origin=null
+      VAR name:name type:@[FlexibleNullability] kotlin.Any? [val]
+        CALL 'public open fun invoke (p0: @[FlexibleNullability] kotlin.Any?, vararg p1: @[FlexibleNullability] kotlin.Any?): @[FlexibleNullability] kotlin.Any? declared in java.lang.reflect.Method' type=@[FlexibleNullability] kotlin.Any? origin=null
+          $this: CALL 'public open fun getMethod (p0: @[FlexibleNullability] kotlin.String?, vararg p1: @[FlexibleNullability] java.lang.Class<*>?): @[FlexibleNullability] java.lang.reflect.Method? declared in java.lang.Class' type=@[FlexibleNullability] java.lang.reflect.Method? origin=null
+            $this: CALL 'public final fun <get-java> <T> (): java.lang.Class<T of kotlin.jvm.<get-java>> declared in kotlin.jvm' type=java.lang.Class<foo.bar.User> origin=GET_PROPERTY
+              <T>: foo.bar.User
+              $receiver: CLASS_REFERENCE 'CLASS CLASS name:User modality:FINAL visibility:public [data] superTypes:[kotlin.Any; foo.bar.Product; java.io.Serializable]' type=kotlin.reflect.KClass<foo.bar.User>
+            p0: CONST String type=kotlin.String value="name"
+          p0: GET_VAR 'val user: foo.bar.User declared in foo.bar.box' type=foo.bar.User origin=null
+      VAR name:age type:@[FlexibleNullability] kotlin.Any? [val]
+        CALL 'public open fun invoke (p0: @[FlexibleNullability] kotlin.Any?, vararg p1: @[FlexibleNullability] kotlin.Any?): @[FlexibleNullability] kotlin.Any? declared in java.lang.reflect.Method' type=@[FlexibleNullability] kotlin.Any? origin=null
+          $this: CALL 'public open fun getMethod (p0: @[FlexibleNullability] kotlin.String?, vararg p1: @[FlexibleNullability] java.lang.Class<*>?): @[FlexibleNullability] java.lang.reflect.Method? declared in java.lang.Class' type=@[FlexibleNullability] java.lang.reflect.Method? origin=null
+            $this: CALL 'public final fun <get-java> <T> (): java.lang.Class<T of kotlin.jvm.<get-java>> declared in kotlin.jvm' type=java.lang.Class<foo.bar.User> origin=GET_PROPERTY
+              <T>: foo.bar.User
+              $receiver: CLASS_REFERENCE 'CLASS CLASS name:User modality:FINAL visibility:public [data] superTypes:[kotlin.Any; foo.bar.Product; java.io.Serializable]' type=kotlin.reflect.KClass<foo.bar.User>
+            p0: CONST String type=kotlin.String value="age"
+          p0: GET_VAR 'val user: foo.bar.User declared in foo.bar.box' type=foo.bar.User origin=null
+      VAR name:a type:@[FlexibleNullability] kotlin.Any? [val]
+        CALL 'public open fun invoke (p0: @[FlexibleNullability] kotlin.Any?, vararg p1: @[FlexibleNullability] kotlin.Any?): @[FlexibleNullability] kotlin.Any? declared in java.lang.reflect.Method' type=@[FlexibleNullability] kotlin.Any? origin=null
+          $this: CALL 'public open fun getMethod (p0: @[FlexibleNullability] kotlin.String?, vararg p1: @[FlexibleNullability] java.lang.Class<*>?): @[FlexibleNullability] java.lang.reflect.Method? declared in java.lang.Class' type=@[FlexibleNullability] java.lang.reflect.Method? origin=null
+            $this: CALL 'public final fun <get-java> <T> (): java.lang.Class<T of kotlin.jvm.<get-java>> declared in kotlin.jvm' type=java.lang.Class<foo.bar.User> origin=GET_PROPERTY
+              <T>: foo.bar.User
+              $receiver: CLASS_REFERENCE 'CLASS CLASS name:User modality:FINAL visibility:public [data] superTypes:[kotlin.Any; foo.bar.Product; java.io.Serializable]' type=kotlin.reflect.KClass<foo.bar.User>
+            p0: CONST String type=kotlin.String value="a"
+          p0: GET_VAR 'val user: foo.bar.User declared in foo.bar.box' type=foo.bar.User origin=null
+      VAR name:b type:@[FlexibleNullability] kotlin.Any? [val]
+        CALL 'public open fun invoke (p0: @[FlexibleNullability] kotlin.Any?, vararg p1: @[FlexibleNullability] kotlin.Any?): @[FlexibleNullability] kotlin.Any? declared in java.lang.reflect.Method' type=@[FlexibleNullability] kotlin.Any? origin=null
+          $this: CALL 'public open fun getMethod (p0: @[FlexibleNullability] kotlin.String?, vararg p1: @[FlexibleNullability] java.lang.Class<*>?): @[FlexibleNullability] java.lang.reflect.Method? declared in java.lang.Class' type=@[FlexibleNullability] java.lang.reflect.Method? origin=null
+            $this: CALL 'public final fun <get-java> <T> (): java.lang.Class<T of kotlin.jvm.<get-java>> declared in kotlin.jvm' type=java.lang.Class<foo.bar.User> origin=GET_PROPERTY
+              <T>: foo.bar.User
+              $receiver: CLASS_REFERENCE 'CLASS CLASS name:User modality:FINAL visibility:public [data] superTypes:[kotlin.Any; foo.bar.Product; java.io.Serializable]' type=kotlin.reflect.KClass<foo.bar.User>
+            p0: CONST String type=kotlin.String value="b"
+          p0: GET_VAR 'val user: foo.bar.User declared in foo.bar.box' type=foo.bar.User origin=null
+      WHEN type=kotlin.Unit origin=IF
+        BRANCH
+          if: WHEN type=kotlin.Boolean origin=OROR
+            BRANCH
+              if: WHEN type=kotlin.Boolean origin=OROR
+                BRANCH
+                  if: WHEN type=kotlin.Boolean origin=OROR
+                    BRANCH
+                      if: CALL 'public final fun not (): kotlin.Boolean declared in kotlin.Boolean' type=kotlin.Boolean origin=EXCLEQ
+                        $this: CALL 'public final fun EQEQ (arg0: kotlin.Any?, arg1: kotlin.Any?): kotlin.Boolean declared in kotlin.internal.ir' type=kotlin.Boolean origin=EXCLEQ
+                          arg0: GET_VAR 'val name: @[FlexibleNullability] kotlin.Any? declared in foo.bar.box' type=@[FlexibleNullability] kotlin.Any? origin=null
+                          arg1: CONST String type=kotlin.String value="John Doe"
+                      then: CONST Boolean type=kotlin.Boolean value=true
+                    BRANCH
+                      if: CONST Boolean type=kotlin.Boolean value=true
+                      then: CALL 'public final fun not (): kotlin.Boolean declared in kotlin.Boolean' type=kotlin.Boolean origin=EXCLEQ
+                        $this: CALL 'public final fun EQEQ (arg0: kotlin.Any?, arg1: kotlin.Any?): kotlin.Boolean declared in kotlin.internal.ir' type=kotlin.Boolean origin=EXCLEQ
+                          arg0: GET_VAR 'val age: @[FlexibleNullability] kotlin.Any? declared in foo.bar.box' type=@[FlexibleNullability] kotlin.Any? origin=null
+                          arg1: CONST Int type=kotlin.Int value=25
+                  then: CONST Boolean type=kotlin.Boolean value=true
+                BRANCH
+                  if: CONST Boolean type=kotlin.Boolean value=true
+                  then: CALL 'public final fun not (): kotlin.Boolean declared in kotlin.Boolean' type=kotlin.Boolean origin=EXCLEQ
+                    $this: CALL 'public final fun EQEQ (arg0: kotlin.Any?, arg1: kotlin.Any?): kotlin.Boolean declared in kotlin.internal.ir' type=kotlin.Boolean origin=EXCLEQ
+                      arg0: GET_VAR 'val a: @[FlexibleNullability] kotlin.Any? declared in foo.bar.box' type=@[FlexibleNullability] kotlin.Any? origin=null
+                      arg1: CONST Double type=kotlin.Double value=1.0
+              then: CONST Boolean type=kotlin.Boolean value=true
+            BRANCH
+              if: CONST Boolean type=kotlin.Boolean value=true
+              then: CALL 'public final fun not (): kotlin.Boolean declared in kotlin.Boolean' type=kotlin.Boolean origin=EXCLEQ
+                $this: CALL 'public final fun EQEQ (arg0: kotlin.Any?, arg1: kotlin.Any?): kotlin.Boolean declared in kotlin.internal.ir' type=kotlin.Boolean origin=EXCLEQ
+                  arg0: GET_VAR 'val b: @[FlexibleNullability] kotlin.Any? declared in foo.bar.box' type=@[FlexibleNullability] kotlin.Any? origin=null
+                  arg1: CONST Double type=kotlin.Double value=2.0
+          then: BLOCK type=kotlin.Unit origin=null
+            RETURN type=kotlin.Nothing from='public final fun box (): kotlin.String declared in foo.bar'
+              CONST String type=kotlin.String value="Could not invoke functions name(), age(), a(), or b() from Java"
+      WHEN type=kotlin.Unit origin=IF
+        BRANCH
+          if: TYPE_OP type=kotlin.Boolean origin=NOT_INSTANCEOF typeOperand=foo.bar.Product
+            GET_VAR 'val user: foo.bar.User declared in foo.bar.box' type=foo.bar.User origin=null
+          then: RETURN type=kotlin.Nothing from='public final fun box (): kotlin.String declared in foo.bar'
+            CONST String type=kotlin.String value="User is not a Product"
+      WHEN type=kotlin.Unit origin=IF
+        BRANCH
+          if: TYPE_OP type=kotlin.Boolean origin=NOT_INSTANCEOF typeOperand=java.io.Serializable
+            GET_VAR 'val user: foo.bar.User declared in foo.bar.box' type=foo.bar.User origin=null
+          then: RETURN type=kotlin.Nothing from='public final fun box (): kotlin.String declared in foo.bar'
+            CONST String type=kotlin.String value="User is not Serializable"
+      VAR name:canEqual type:@[FlexibleNullability] kotlin.Any? [val]
+        CALL 'public open fun invoke (p0: @[FlexibleNullability] kotlin.Any?, vararg p1: @[FlexibleNullability] kotlin.Any?): @[FlexibleNullability] kotlin.Any? declared in java.lang.reflect.Method' type=@[FlexibleNullability] kotlin.Any? origin=null
+          $this: CALL 'public open fun getMethod (p0: @[FlexibleNullability] kotlin.String?, vararg p1: @[FlexibleNullability] java.lang.Class<*>?): @[FlexibleNullability] java.lang.reflect.Method? declared in java.lang.Class' type=@[FlexibleNullability] java.lang.reflect.Method? origin=null
+            $this: CALL 'public final fun <get-java> <T> (): java.lang.Class<T of kotlin.jvm.<get-java>> declared in kotlin.jvm' type=java.lang.Class<foo.bar.User> origin=GET_PROPERTY
+              <T>: foo.bar.User
+              $receiver: CLASS_REFERENCE 'CLASS CLASS name:User modality:FINAL visibility:public [data] superTypes:[kotlin.Any; foo.bar.Product; java.io.Serializable]' type=kotlin.reflect.KClass<foo.bar.User>
+            p0: CONST String type=kotlin.String value="canEqual"
+            p1: VARARG type=@[FlexibleNullability] @[FlexibleArrayElementVariance] kotlin.Array<out @[FlexibleNullability] java.lang.Class<*>?>? varargElementType=@[FlexibleNullability] java.lang.Class<*>?
+              CALL 'public final fun <get-java> <T> (): java.lang.Class<T of kotlin.jvm.<get-java>> declared in kotlin.jvm' type=java.lang.Class<kotlin.Any> origin=GET_PROPERTY
+                <T>: kotlin.Any
+                $receiver: CLASS_REFERENCE 'CLASS IR_EXTERNAL_DECLARATION_STUB CLASS name:Any modality:OPEN visibility:public superTypes:[]' type=kotlin.reflect.KClass<kotlin.Any>
+          p0: GET_VAR 'val user: foo.bar.User declared in foo.bar.box' type=foo.bar.User origin=null
+          p1: VARARG type=@[FlexibleNullability] @[FlexibleArrayElementVariance] kotlin.Array<out @[FlexibleNullability] kotlin.Any?>? varargElementType=@[FlexibleNullability] kotlin.Any?
+            GET_VAR 'val user: foo.bar.User declared in foo.bar.box' type=foo.bar.User origin=null
+      WHEN type=kotlin.Unit origin=IF
+        BRANCH
+          if: CALL 'public final fun not (): kotlin.Boolean declared in kotlin.Boolean' type=kotlin.Boolean origin=EXCLEQ
+            $this: CALL 'public final fun EQEQ (arg0: kotlin.Any?, arg1: kotlin.Any?): kotlin.Boolean declared in kotlin.internal.ir' type=kotlin.Boolean origin=EXCLEQ
+              arg0: GET_VAR 'val canEqual: @[FlexibleNullability] kotlin.Any? declared in foo.bar.box' type=@[FlexibleNullability] kotlin.Any? origin=null
+              arg1: CONST Boolean type=kotlin.Boolean value=true
+          then: BLOCK type=kotlin.Unit origin=null
+            RETURN type=kotlin.Nothing from='public final fun box (): kotlin.String declared in foo.bar'
+              CONST String type=kotlin.String value="Could invoke function canEqual() from Java but was false"
+      VAR name:productArity type:@[FlexibleNullability] kotlin.Any? [val]
+        CALL 'public open fun invoke (p0: @[FlexibleNullability] kotlin.Any?, vararg p1: @[FlexibleNullability] kotlin.Any?): @[FlexibleNullability] kotlin.Any? declared in java.lang.reflect.Method' type=@[FlexibleNullability] kotlin.Any? origin=null
+          $this: CALL 'public open fun getMethod (p0: @[FlexibleNullability] kotlin.String?, vararg p1: @[FlexibleNullability] java.lang.Class<*>?): @[FlexibleNullability] java.lang.reflect.Method? declared in java.lang.Class' type=@[FlexibleNullability] java.lang.reflect.Method? origin=null
+            $this: CALL 'public final fun <get-java> <T> (): java.lang.Class<T of kotlin.jvm.<get-java>> declared in kotlin.jvm' type=java.lang.Class<foo.bar.User> origin=GET_PROPERTY
+              <T>: foo.bar.User
+              $receiver: CLASS_REFERENCE 'CLASS CLASS name:User modality:FINAL visibility:public [data] superTypes:[kotlin.Any; foo.bar.Product; java.io.Serializable]' type=kotlin.reflect.KClass<foo.bar.User>
+            p0: CONST String type=kotlin.String value="productArity"
+          p0: GET_VAR 'val user: foo.bar.User declared in foo.bar.box' type=foo.bar.User origin=null
+      WHEN type=kotlin.Unit origin=IF
+        BRANCH
+          if: CALL 'public final fun not (): kotlin.Boolean declared in kotlin.Boolean' type=kotlin.Boolean origin=EXCLEQ
+            $this: CALL 'public final fun EQEQ (arg0: kotlin.Any?, arg1: kotlin.Any?): kotlin.Boolean declared in kotlin.internal.ir' type=kotlin.Boolean origin=EXCLEQ
+              arg0: GET_VAR 'val productArity: @[FlexibleNullability] kotlin.Any? declared in foo.bar.box' type=@[FlexibleNullability] kotlin.Any? origin=null
+              arg1: CONST Int type=kotlin.Int value=4
+          then: BLOCK type=kotlin.Unit origin=null
+            RETURN type=kotlin.Nothing from='public final fun box (): kotlin.String declared in foo.bar'
+              STRING_CONCATENATION type=kotlin.String
+                CONST String type=kotlin.String value="Could invoke function productArity() from Java but was "
+                GET_VAR 'val productArity: @[FlexibleNullability] kotlin.Any? declared in foo.bar.box' type=@[FlexibleNullability] kotlin.Any? origin=null
+      VAR name:productElement type:@[FlexibleNullability] kotlin.Any? [val]
+        CALL 'public open fun invoke (p0: @[FlexibleNullability] kotlin.Any?, vararg p1: @[FlexibleNullability] kotlin.Any?): @[FlexibleNullability] kotlin.Any? declared in java.lang.reflect.Method' type=@[FlexibleNullability] kotlin.Any? origin=null
+          $this: CALL 'public open fun getMethod (p0: @[FlexibleNullability] kotlin.String?, vararg p1: @[FlexibleNullability] java.lang.Class<*>?): @[FlexibleNullability] java.lang.reflect.Method? declared in java.lang.Class' type=@[FlexibleNullability] java.lang.reflect.Method? origin=null
+            $this: CALL 'public final fun <get-java> <T> (): java.lang.Class<T of kotlin.jvm.<get-java>> declared in kotlin.jvm' type=java.lang.Class<foo.bar.User> origin=GET_PROPERTY
+              <T>: foo.bar.User
+              $receiver: CLASS_REFERENCE 'CLASS CLASS name:User modality:FINAL visibility:public [data] superTypes:[kotlin.Any; foo.bar.Product; java.io.Serializable]' type=kotlin.reflect.KClass<foo.bar.User>
+            p0: CONST String type=kotlin.String value="productElement"
+            p1: VARARG type=@[FlexibleNullability] @[FlexibleArrayElementVariance] kotlin.Array<out @[FlexibleNullability] java.lang.Class<*>?>? varargElementType=@[FlexibleNullability] java.lang.Class<*>?
+              CALL 'public final fun <get-java> <T> (): java.lang.Class<T of kotlin.jvm.<get-java>> declared in kotlin.jvm' type=java.lang.Class<kotlin.Int> origin=GET_PROPERTY
+                <T>: kotlin.Int
+                $receiver: CLASS_REFERENCE 'CLASS IR_EXTERNAL_DECLARATION_STUB CLASS name:Int modality:FINAL visibility:public superTypes:[kotlin.Number; kotlin.Comparable<kotlin.Int>; java.io.Serializable]' type=kotlin.reflect.KClass<kotlin.Int>
+          p0: GET_VAR 'val user: foo.bar.User declared in foo.bar.box' type=foo.bar.User origin=null
+          p1: VARARG type=@[FlexibleNullability] @[FlexibleArrayElementVariance] kotlin.Array<out @[FlexibleNullability] kotlin.Any?>? varargElementType=@[FlexibleNullability] kotlin.Any?
+            CONST Int type=kotlin.Int value=0
+      WHEN type=kotlin.Unit origin=IF
+        BRANCH
+          if: CALL 'public final fun not (): kotlin.Boolean declared in kotlin.Boolean' type=kotlin.Boolean origin=EXCLEQ
+            $this: CALL 'public final fun EQEQ (arg0: kotlin.Any?, arg1: kotlin.Any?): kotlin.Boolean declared in kotlin.internal.ir' type=kotlin.Boolean origin=EXCLEQ
+              arg0: GET_VAR 'val productElement: @[FlexibleNullability] kotlin.Any? declared in foo.bar.box' type=@[FlexibleNullability] kotlin.Any? origin=null
+              arg1: CONST String type=kotlin.String value="John Doe"
+          then: BLOCK type=kotlin.Unit origin=null
+            RETURN type=kotlin.Nothing from='public final fun box (): kotlin.String declared in foo.bar'
+              STRING_CONCATENATION type=kotlin.String
+                CONST String type=kotlin.String value="Could invoke function productElement() from Java but was "
+                GET_VAR 'val productElement: @[FlexibleNullability] kotlin.Any? declared in foo.bar.box' type=@[FlexibleNullability] kotlin.Any? origin=null
+      TYPE_OP type=kotlin.Unit origin=IMPLICIT_COERCION_TO_UNIT typeOperand=kotlin.Unit
+        TRY type=@[FlexibleNullability] kotlin.Any?
+          try: BLOCK type=@[FlexibleNullability] kotlin.Any? origin=null
+            CALL 'public open fun invoke (p0: @[FlexibleNullability] kotlin.Any?, vararg p1: @[FlexibleNullability] kotlin.Any?): @[FlexibleNullability] kotlin.Any? declared in java.lang.reflect.Method' type=@[FlexibleNullability] kotlin.Any? origin=null
+              $this: CALL 'public open fun getMethod (p0: @[FlexibleNullability] kotlin.String?, vararg p1: @[FlexibleNullability] java.lang.Class<*>?): @[FlexibleNullability] java.lang.reflect.Method? declared in java.lang.Class' type=@[FlexibleNullability] java.lang.reflect.Method? origin=null
+                $this: CALL 'public final fun <get-java> <T> (): java.lang.Class<T of kotlin.jvm.<get-java>> declared in kotlin.jvm' type=java.lang.Class<foo.bar.User> origin=GET_PROPERTY
+                  <T>: foo.bar.User
+                  $receiver: CLASS_REFERENCE 'CLASS CLASS name:User modality:FINAL visibility:public [data] superTypes:[kotlin.Any; foo.bar.Product; java.io.Serializable]' type=kotlin.reflect.KClass<foo.bar.User>
+                p0: CONST String type=kotlin.String value="productElement"
+                p1: VARARG type=@[FlexibleNullability] @[FlexibleArrayElementVariance] kotlin.Array<out @[FlexibleNullability] java.lang.Class<*>?>? varargElementType=@[FlexibleNullability] java.lang.Class<*>?
+                  CALL 'public final fun <get-java> <T> (): java.lang.Class<T of kotlin.jvm.<get-java>> declared in kotlin.jvm' type=java.lang.Class<kotlin.Int> origin=GET_PROPERTY
+                    <T>: kotlin.Int
+                    $receiver: CLASS_REFERENCE 'CLASS IR_EXTERNAL_DECLARATION_STUB CLASS name:Int modality:FINAL visibility:public superTypes:[kotlin.Number; kotlin.Comparable<kotlin.Int>; java.io.Serializable]' type=kotlin.reflect.KClass<kotlin.Int>
+              p0: GET_VAR 'val user: foo.bar.User declared in foo.bar.box' type=foo.bar.User origin=null
+              p1: VARARG type=@[FlexibleNullability] @[FlexibleArrayElementVariance] kotlin.Array<out @[FlexibleNullability] kotlin.Any?>? varargElementType=@[FlexibleNullability] kotlin.Any?
+                CONST Int type=kotlin.Int value=10
+          CATCH parameter=val e: java.lang.Exception declared in foo.bar.box
+            VAR CATCH_PARAMETER name:e type:java.lang.Exception [val]
+            BLOCK type=kotlin.Nothing origin=null
+              RETURN type=kotlin.Nothing from='public final fun box (): kotlin.String declared in foo.bar'
+                CONST String type=kotlin.String value="OK"
+      RETURN type=kotlin.Nothing from='public final fun box (): kotlin.String declared in foo.bar'
+        CONST String type=kotlin.String value="Could invoke function productElement() from Java but did not throw IndexOutOfBoundsException"
diff --git a/compiler-plugin/src/test/resources/testData/box/dataClassIsProductTest.fir.txt b/compiler-plugin/src/test/resources/testData/box/dataClassIsProductTest.fir.txt
new file mode 100644
index 00000000..d038809c
--- /dev/null
+++ b/compiler-plugin/src/test/resources/testData/box/dataClassIsProductTest.fir.txt
@@ -0,0 +1,110 @@
+FILE: dataClassIsProductTest.kt
+    package foo.bar
+
+    public final annotation class Sparkify : R|kotlin/Annotation| {
+        public constructor(): R|foo/bar/Sparkify| {
+            super<R|kotlin/Any|>()
+        }
+
+    }
+    public final annotation class ColumnName : R|kotlin/Annotation| {
+        public constructor(name: R|kotlin/String|): R|foo/bar/ColumnName| {
+            super<R|kotlin/Any|>()
+        }
+
+        public final val name: R|kotlin/String| = R|<local>/name|
+            public get(): R|kotlin/String|
+
+    }
+    public abstract interface Equals : R|kotlin/Any| {
+        public abstract fun canEqual(that: R|kotlin/Any?|): R|kotlin/Boolean|
+
+    }
+    public abstract interface Product : R|foo/bar/Equals| {
+        public abstract fun productElement(n: R|kotlin/Int|): R|kotlin/Any|
+
+        public abstract fun productArity(): R|kotlin/Int|
+
+    }
+    public final fun box(): R|kotlin/String| {
+        lval user: R|foo/bar/User| = R|foo/bar/User.User|()
+        lval name: R|kotlin/Any!| = <getClass>(Q|foo/bar/User|).R|kotlin/jvm/java|<R|foo/bar/User|>.R|SubstitutionOverride<java/lang/Class.getMethod: R|java/lang/reflect/Method!|>|(String(name)).R|java/lang/reflect/Method.invoke|(R|<local>/user|)
+        lval age: R|kotlin/Any!| = <getClass>(Q|foo/bar/User|).R|kotlin/jvm/java|<R|foo/bar/User|>.R|SubstitutionOverride<java/lang/Class.getMethod: R|java/lang/reflect/Method!|>|(String(age)).R|java/lang/reflect/Method.invoke|(R|<local>/user|)
+        lval a: R|kotlin/Any!| = <getClass>(Q|foo/bar/User|).R|kotlin/jvm/java|<R|foo/bar/User|>.R|SubstitutionOverride<java/lang/Class.getMethod: R|java/lang/reflect/Method!|>|(String(a)).R|java/lang/reflect/Method.invoke|(R|<local>/user|)
+        lval b: R|kotlin/Any!| = <getClass>(Q|foo/bar/User|).R|kotlin/jvm/java|<R|foo/bar/User|>.R|SubstitutionOverride<java/lang/Class.getMethod: R|java/lang/reflect/Method!|>|(String(b)).R|java/lang/reflect/Method.invoke|(R|<local>/user|)
+        when () {
+            !=(R|<local>/name|, String(John Doe)) || !=(R|<local>/age|, Int(25)) || !=(R|<local>/a|, Double(1.0)) || !=(R|<local>/b|, Double(2.0)) ->  {
+                ^box String(Could not invoke functions name(), age(), a(), or b() from Java)
+            }
+        }
+
+        @R|kotlin/Suppress|(names = vararg(String(USELESS_IS_CHECK))) when () {
+            (R|<local>/user| !is R|foo/bar/Product|) ->  {
+                ^box String(User is not a Product)
+            }
+        }
+
+        @R|kotlin/Suppress|(names = vararg(String(USELESS_IS_CHECK))) when () {
+            (R|<local>/user| !is R|java/io/Serializable|) ->  {
+                ^box String(User is not Serializable)
+            }
+        }
+
+        lval canEqual: R|kotlin/Any!| = <getClass>(Q|foo/bar/User|).R|kotlin/jvm/java|<R|foo/bar/User|>.R|SubstitutionOverride<java/lang/Class.getMethod: R|java/lang/reflect/Method!|>|(String(canEqual), vararg(<getClass>(Q|kotlin/Any|).R|kotlin/jvm/java|<R|kotlin/Any|>)).R|java/lang/reflect/Method.invoke|(R|<local>/user|, vararg(R|<local>/user|))
+        when () {
+            !=(R|<local>/canEqual|, Boolean(true)) ->  {
+                ^box String(Could invoke function canEqual() from Java but was false)
+            }
+        }
+
+        lval productArity: R|kotlin/Any!| = <getClass>(Q|foo/bar/User|).R|kotlin/jvm/java|<R|foo/bar/User|>.R|SubstitutionOverride<java/lang/Class.getMethod: R|java/lang/reflect/Method!|>|(String(productArity)).R|java/lang/reflect/Method.invoke|(R|<local>/user|)
+        when () {
+            !=(R|<local>/productArity|, Int(4)) ->  {
+                ^box <strcat>(String(Could invoke function productArity() from Java but was ), R|<local>/productArity|)
+            }
+        }
+
+        lval productElement: R|kotlin/Any!| = <getClass>(Q|foo/bar/User|).R|kotlin/jvm/java|<R|foo/bar/User|>.R|SubstitutionOverride<java/lang/Class.getMethod: R|java/lang/reflect/Method!|>|(String(productElement), vararg(<getClass>(Q|kotlin/Int|).R|kotlin/jvm/java|<R|kotlin/Int|>)).R|java/lang/reflect/Method.invoke|(R|<local>/user|, vararg(Int(0)))
+        when () {
+            !=(R|<local>/productElement|, String(John Doe)) ->  {
+                ^box <strcat>(String(Could invoke function productElement() from Java but was ), R|<local>/productElement|)
+            }
+        }
+
+        try {
+            <getClass>(Q|foo/bar/User|).R|kotlin/jvm/java|<R|foo/bar/User|>.R|SubstitutionOverride<java/lang/Class.getMethod: R|java/lang/reflect/Method!|>|(String(productElement), vararg(<getClass>(Q|kotlin/Int|).R|kotlin/jvm/java|<R|kotlin/Int|>)).R|java/lang/reflect/Method.invoke|(R|<local>/user|, vararg(Int(10)))
+        }
+        catch (e: R|kotlin/Exception|) {
+            ^box String(OK)
+        }
+
+        ^box String(Could invoke function productElement() from Java but did not throw IndexOutOfBoundsException)
+    }
+    @R|foo/bar/Sparkify|() public final data class User : R|kotlin/Any| {
+        public constructor(name: R|kotlin/String| = String(John Doe), age: R|kotlin/Int| = Int(25), @R|foo/bar/ColumnName|(name = String(a)) test: R|kotlin/Double| = Double(1.0), test2: R|kotlin/Double| = Double(2.0)): R|foo/bar/User| {
+            super<R|kotlin/Any|>()
+        }
+
+        public final val name: R|kotlin/String| = R|<local>/name|
+            public get(): R|kotlin/String|
+
+        public final val age: R|kotlin/Int| = R|<local>/age|
+            public get(): R|kotlin/Int|
+
+        public final val test: R|kotlin/Double| = R|<local>/test|
+            public get(): R|kotlin/Double|
+
+        public final val test2: R|kotlin/Double| = R|<local>/test2|
+            @PROPERTY_GETTER:R|foo/bar/ColumnName|(name = String(b)) public get(): R|kotlin/Double|
+
+        public final operator fun component1(): R|kotlin/String|
+
+        public final operator fun component2(): R|kotlin/Int|
+
+        public final operator fun component3(): R|kotlin/Double|
+
+        public final operator fun component4(): R|kotlin/Double|
+
+        public final fun copy(name: R|kotlin/String| = this@R|foo/bar/User|.R|foo/bar/User.name|, age: R|kotlin/Int| = this@R|foo/bar/User|.R|foo/bar/User.age|, @R|foo/bar/ColumnName|(name = String(a)) test: R|kotlin/Double| = this@R|foo/bar/User|.R|foo/bar/User.test|, test2: R|kotlin/Double| = this@R|foo/bar/User|.R|foo/bar/User.test2|): R|foo/bar/User|
+
+    }
diff --git a/compiler-plugin/src/test/resources/testData/box/dataClassIsProductTest.kt b/compiler-plugin/src/test/resources/testData/box/dataClassIsProductTest.kt
new file mode 100644
index 00000000..629af08f
--- /dev/null
+++ b/compiler-plugin/src/test/resources/testData/box/dataClassIsProductTest.kt
@@ -0,0 +1,62 @@
+package foo.bar
+
+annotation class Sparkify
+annotation class ColumnName(val name: String)
+
+// Fake Equals
+interface Equals {
+    fun canEqual(that: Any?): Boolean
+}
+
+// Fake Product
+interface Product: Equals {
+    fun productElement(n: Int): Any
+    fun productArity(): Int
+}
+
+fun box(): String {
+    val user = User()
+    val name = User::class.java.getMethod("name").invoke(user)
+    val age = User::class.java.getMethod("age").invoke(user)
+    val a = User::class.java.getMethod("a").invoke(user)
+    val b = User::class.java.getMethod("b").invoke(user)
+
+    if (name != "John Doe" || age != 25 || a != 1.0 || b != 2.0) {
+        return "Could not invoke functions name(), age(), a(), or b() from Java"
+    }
+    @Suppress("USELESS_IS_CHECK")
+    if (user !is foo.bar.Product)
+        return "User is not a Product"
+
+    @Suppress("USELESS_IS_CHECK")
+    if (user !is java.io.Serializable)
+        return "User is not Serializable"
+
+    val canEqual = User::class.java.getMethod("canEqual", Any::class.java).invoke(user, user)
+    if (canEqual != true) {
+        return "Could invoke function canEqual() from Java but was false"
+    }
+    val productArity = User::class.java.getMethod("productArity").invoke(user)
+    if (productArity != 4) {
+        return "Could invoke function productArity() from Java but was $productArity"
+    }
+    val productElement = User::class.java.getMethod("productElement", Int::class.java).invoke(user, 0)
+    if (productElement != "John Doe") {
+        return "Could invoke function productElement() from Java but was $productElement"
+    }
+    try {
+        User::class.java.getMethod("productElement", Int::class.java).invoke(user, 10)
+    } catch (e: Exception) {
+        return "OK"
+    }
+
+    return "Could invoke function productElement() from Java but did not throw IndexOutOfBoundsException"
+}
+
+@Sparkify
+data class User(
+    val name: String = "John Doe",
+    val age: Int = 25,
+    @ColumnName("a") val test: Double = 1.0,
+    @get:ColumnName("b") val test2: Double = 2.0,
+)
\ No newline at end of file
diff --git a/compiler-plugin/src/test/resources/testData/box/dataClassTest.fir.ir.txt b/compiler-plugin/src/test/resources/testData/box/dataClassTest.fir.ir.txt
new file mode 100644
index 00000000..b669ac1b
--- /dev/null
+++ b/compiler-plugin/src/test/resources/testData/box/dataClassTest.fir.ir.txt
@@ -0,0 +1,634 @@
+FILE fqName:foo.bar fileName:/dataClassTest.kt
+  CLASS ANNOTATION_CLASS name:ColumnName modality:OPEN visibility:public superTypes:[kotlin.Annotation]
+    $this: VALUE_PARAMETER INSTANCE_RECEIVER name:<this> type:foo.bar.ColumnName
+    PROPERTY name:name visibility:public modality:FINAL [val]
+      FIELD PROPERTY_BACKING_FIELD name:name type:kotlin.String visibility:private [final]
+        EXPRESSION_BODY
+          GET_VAR 'name: kotlin.String declared in foo.bar.ColumnName.<init>' type=kotlin.String origin=INITIALIZE_PROPERTY_FROM_PARAMETER
+      FUN DEFAULT_PROPERTY_ACCESSOR name:<get-name> visibility:public modality:FINAL <> ($this:foo.bar.ColumnName) returnType:kotlin.String
+        correspondingProperty: PROPERTY name:name visibility:public modality:FINAL [val]
+        $this: VALUE_PARAMETER name:<this> type:foo.bar.ColumnName
+        BLOCK_BODY
+          RETURN type=kotlin.Nothing from='public final fun <get-name> (): kotlin.String declared in foo.bar.ColumnName'
+            GET_FIELD 'FIELD PROPERTY_BACKING_FIELD name:name type:kotlin.String visibility:private [final]' type=kotlin.String origin=null
+              receiver: GET_VAR '<this>: foo.bar.ColumnName declared in foo.bar.ColumnName.<get-name>' type=foo.bar.ColumnName origin=null
+    CONSTRUCTOR visibility:public <> (name:kotlin.String) returnType:foo.bar.ColumnName [primary]
+      VALUE_PARAMETER name:name index:0 type:kotlin.String
+      BLOCK_BODY
+        DELEGATING_CONSTRUCTOR_CALL 'public constructor <init> () declared in kotlin.Any'
+        INSTANCE_INITIALIZER_CALL classDescriptor='CLASS ANNOTATION_CLASS name:ColumnName modality:OPEN visibility:public superTypes:[kotlin.Annotation]'
+    FUN FAKE_OVERRIDE name:equals visibility:public modality:OPEN <> ($this:kotlin.Any, other:kotlin.Any?) returnType:kotlin.Boolean [fake_override,operator]
+      overridden:
+        public open fun equals (other: kotlin.Any?): kotlin.Boolean declared in kotlin.Annotation
+      $this: VALUE_PARAMETER name:<this> type:kotlin.Any
+      VALUE_PARAMETER name:other index:0 type:kotlin.Any?
+    FUN FAKE_OVERRIDE name:hashCode visibility:public modality:OPEN <> ($this:kotlin.Any) returnType:kotlin.Int [fake_override]
+      overridden:
+        public open fun hashCode (): kotlin.Int declared in kotlin.Annotation
+      $this: VALUE_PARAMETER name:<this> type:kotlin.Any
+    FUN FAKE_OVERRIDE name:toString visibility:public modality:OPEN <> ($this:kotlin.Any) returnType:kotlin.String [fake_override]
+      overridden:
+        public open fun toString (): kotlin.String declared in kotlin.Annotation
+      $this: VALUE_PARAMETER name:<this> type:kotlin.Any
+  CLASS ANNOTATION_CLASS name:Sparkify modality:OPEN visibility:public superTypes:[kotlin.Annotation]
+    $this: VALUE_PARAMETER INSTANCE_RECEIVER name:<this> type:foo.bar.Sparkify
+    CONSTRUCTOR visibility:public <> () returnType:foo.bar.Sparkify [primary]
+      BLOCK_BODY
+        DELEGATING_CONSTRUCTOR_CALL 'public constructor <init> () declared in kotlin.Any'
+        INSTANCE_INITIALIZER_CALL classDescriptor='CLASS ANNOTATION_CLASS name:Sparkify modality:OPEN visibility:public superTypes:[kotlin.Annotation]'
+    FUN FAKE_OVERRIDE name:equals visibility:public modality:OPEN <> ($this:kotlin.Any, other:kotlin.Any?) returnType:kotlin.Boolean [fake_override,operator]
+      overridden:
+        public open fun equals (other: kotlin.Any?): kotlin.Boolean declared in kotlin.Annotation
+      $this: VALUE_PARAMETER name:<this> type:kotlin.Any
+      VALUE_PARAMETER name:other index:0 type:kotlin.Any?
+    FUN FAKE_OVERRIDE name:hashCode visibility:public modality:OPEN <> ($this:kotlin.Any) returnType:kotlin.Int [fake_override]
+      overridden:
+        public open fun hashCode (): kotlin.Int declared in kotlin.Annotation
+      $this: VALUE_PARAMETER name:<this> type:kotlin.Any
+    FUN FAKE_OVERRIDE name:toString visibility:public modality:OPEN <> ($this:kotlin.Any) returnType:kotlin.String [fake_override]
+      overridden:
+        public open fun toString (): kotlin.String declared in kotlin.Annotation
+      $this: VALUE_PARAMETER name:<this> type:kotlin.Any
+  CLASS CLASS name:NormalUser modality:FINAL visibility:public [data] superTypes:[kotlin.Any]
+    $this: VALUE_PARAMETER INSTANCE_RECEIVER name:<this> type:foo.bar.NormalUser
+    PROPERTY name:name visibility:public modality:FINAL [val]
+      FIELD PROPERTY_BACKING_FIELD name:name type:kotlin.String visibility:private [final]
+        EXPRESSION_BODY
+          GET_VAR 'name: kotlin.String declared in foo.bar.NormalUser.<init>' type=kotlin.String origin=INITIALIZE_PROPERTY_FROM_PARAMETER
+      FUN DEFAULT_PROPERTY_ACCESSOR name:<get-name> visibility:public modality:FINAL <> ($this:foo.bar.NormalUser) returnType:kotlin.String
+        correspondingProperty: PROPERTY name:name visibility:public modality:FINAL [val]
+        $this: VALUE_PARAMETER name:<this> type:foo.bar.NormalUser
+        BLOCK_BODY
+          RETURN type=kotlin.Nothing from='public final fun <get-name> (): kotlin.String declared in foo.bar.NormalUser'
+            GET_FIELD 'FIELD PROPERTY_BACKING_FIELD name:name type:kotlin.String visibility:private [final]' type=kotlin.String origin=null
+              receiver: GET_VAR '<this>: foo.bar.NormalUser declared in foo.bar.NormalUser.<get-name>' type=foo.bar.NormalUser origin=null
+    PROPERTY name:age visibility:public modality:FINAL [val]
+      FIELD PROPERTY_BACKING_FIELD name:age type:kotlin.Int visibility:private [final]
+        EXPRESSION_BODY
+          GET_VAR 'age: kotlin.Int declared in foo.bar.NormalUser.<init>' type=kotlin.Int origin=INITIALIZE_PROPERTY_FROM_PARAMETER
+      FUN DEFAULT_PROPERTY_ACCESSOR name:<get-age> visibility:public modality:FINAL <> ($this:foo.bar.NormalUser) returnType:kotlin.Int
+        correspondingProperty: PROPERTY name:age visibility:public modality:FINAL [val]
+        $this: VALUE_PARAMETER name:<this> type:foo.bar.NormalUser
+        BLOCK_BODY
+          RETURN type=kotlin.Nothing from='public final fun <get-age> (): kotlin.Int declared in foo.bar.NormalUser'
+            GET_FIELD 'FIELD PROPERTY_BACKING_FIELD name:age type:kotlin.Int visibility:private [final]' type=kotlin.Int origin=null
+              receiver: GET_VAR '<this>: foo.bar.NormalUser declared in foo.bar.NormalUser.<get-age>' type=foo.bar.NormalUser origin=null
+    CONSTRUCTOR visibility:public <> (name:kotlin.String, age:kotlin.Int) returnType:foo.bar.NormalUser [primary]
+      VALUE_PARAMETER name:name index:0 type:kotlin.String
+        EXPRESSION_BODY
+          CONST String type=kotlin.String value="John Doe"
+      VALUE_PARAMETER name:age index:1 type:kotlin.Int
+        EXPRESSION_BODY
+          CONST Int type=kotlin.Int value=25
+      BLOCK_BODY
+        DELEGATING_CONSTRUCTOR_CALL 'public constructor <init> () declared in kotlin.Any'
+        INSTANCE_INITIALIZER_CALL classDescriptor='CLASS CLASS name:NormalUser modality:FINAL visibility:public [data] superTypes:[kotlin.Any]'
+    FUN GENERATED_DATA_CLASS_MEMBER name:component1 visibility:public modality:FINAL <> ($this:foo.bar.NormalUser) returnType:kotlin.String [operator]
+      $this: VALUE_PARAMETER name:<this> type:foo.bar.NormalUser
+      BLOCK_BODY
+        RETURN type=kotlin.Nothing from='public final fun component1 (): kotlin.String declared in foo.bar.NormalUser'
+          GET_FIELD 'FIELD PROPERTY_BACKING_FIELD name:name type:kotlin.String visibility:private [final]' type=kotlin.String origin=null
+            receiver: GET_VAR '<this>: foo.bar.NormalUser declared in foo.bar.NormalUser.component1' type=foo.bar.NormalUser origin=null
+    FUN GENERATED_DATA_CLASS_MEMBER name:component2 visibility:public modality:FINAL <> ($this:foo.bar.NormalUser) returnType:kotlin.Int [operator]
+      $this: VALUE_PARAMETER name:<this> type:foo.bar.NormalUser
+      BLOCK_BODY
+        RETURN type=kotlin.Nothing from='public final fun component2 (): kotlin.Int declared in foo.bar.NormalUser'
+          GET_FIELD 'FIELD PROPERTY_BACKING_FIELD name:age type:kotlin.Int visibility:private [final]' type=kotlin.Int origin=null
+            receiver: GET_VAR '<this>: foo.bar.NormalUser declared in foo.bar.NormalUser.component2' type=foo.bar.NormalUser origin=null
+    FUN GENERATED_DATA_CLASS_MEMBER name:copy visibility:public modality:FINAL <> ($this:foo.bar.NormalUser, name:kotlin.String, age:kotlin.Int) returnType:foo.bar.NormalUser
+      $this: VALUE_PARAMETER name:<this> type:foo.bar.NormalUser
+      VALUE_PARAMETER name:name index:0 type:kotlin.String
+        EXPRESSION_BODY
+          GET_FIELD 'FIELD PROPERTY_BACKING_FIELD name:name type:kotlin.String visibility:private [final]' type=kotlin.String origin=null
+            receiver: GET_VAR '<this>: foo.bar.NormalUser declared in foo.bar.NormalUser.copy' type=foo.bar.NormalUser origin=null
+      VALUE_PARAMETER name:age index:1 type:kotlin.Int
+        EXPRESSION_BODY
+          GET_FIELD 'FIELD PROPERTY_BACKING_FIELD name:age type:kotlin.Int visibility:private [final]' type=kotlin.Int origin=null
+            receiver: GET_VAR '<this>: foo.bar.NormalUser declared in foo.bar.NormalUser.copy' type=foo.bar.NormalUser origin=null
+      BLOCK_BODY
+        RETURN type=kotlin.Nothing from='public final fun copy (name: kotlin.String, age: kotlin.Int): foo.bar.NormalUser declared in foo.bar.NormalUser'
+          CONSTRUCTOR_CALL 'public constructor <init> (name: kotlin.String, age: kotlin.Int) declared in foo.bar.NormalUser' type=foo.bar.NormalUser origin=null
+            name: GET_VAR 'name: kotlin.String declared in foo.bar.NormalUser.copy' type=kotlin.String origin=null
+            age: GET_VAR 'age: kotlin.Int declared in foo.bar.NormalUser.copy' type=kotlin.Int origin=null
+    FUN GENERATED_DATA_CLASS_MEMBER name:equals visibility:public modality:OPEN <> ($this:foo.bar.NormalUser, other:kotlin.Any?) returnType:kotlin.Boolean [operator]
+      overridden:
+        public open fun equals (other: kotlin.Any?): kotlin.Boolean declared in kotlin.Any
+      $this: VALUE_PARAMETER name:<this> type:foo.bar.NormalUser
+      VALUE_PARAMETER name:other index:0 type:kotlin.Any?
+      BLOCK_BODY
+        WHEN type=kotlin.Unit origin=null
+          BRANCH
+            if: CALL 'public final fun EQEQEQ (arg0: kotlin.Any?, arg1: kotlin.Any?): kotlin.Boolean declared in kotlin.internal.ir' type=kotlin.Boolean origin=EQEQEQ
+              arg0: GET_VAR '<this>: foo.bar.NormalUser declared in foo.bar.NormalUser.equals' type=foo.bar.NormalUser origin=null
+              arg1: GET_VAR 'other: kotlin.Any? declared in foo.bar.NormalUser.equals' type=kotlin.Any? origin=null
+            then: RETURN type=kotlin.Nothing from='public open fun equals (other: kotlin.Any?): kotlin.Boolean declared in foo.bar.NormalUser'
+              CONST Boolean type=kotlin.Boolean value=true
+        WHEN type=kotlin.Unit origin=null
+          BRANCH
+            if: TYPE_OP type=kotlin.Boolean origin=NOT_INSTANCEOF typeOperand=foo.bar.NormalUser
+              GET_VAR 'other: kotlin.Any? declared in foo.bar.NormalUser.equals' type=kotlin.Any? origin=null
+            then: RETURN type=kotlin.Nothing from='public open fun equals (other: kotlin.Any?): kotlin.Boolean declared in foo.bar.NormalUser'
+              CONST Boolean type=kotlin.Boolean value=false
+        VAR IR_TEMPORARY_VARIABLE name:tmp_0 type:foo.bar.NormalUser [val]
+          TYPE_OP type=foo.bar.NormalUser origin=CAST typeOperand=foo.bar.NormalUser
+            GET_VAR 'other: kotlin.Any? declared in foo.bar.NormalUser.equals' type=kotlin.Any? origin=null
+        WHEN type=kotlin.Unit origin=null
+          BRANCH
+            if: CALL 'public final fun not (): kotlin.Boolean declared in kotlin.Boolean' type=kotlin.Boolean origin=EXCLEQ
+              $this: CALL 'public final fun EQEQ (arg0: kotlin.Any?, arg1: kotlin.Any?): kotlin.Boolean declared in kotlin.internal.ir' type=kotlin.Boolean origin=EXCLEQ
+                arg0: GET_FIELD 'FIELD PROPERTY_BACKING_FIELD name:name type:kotlin.String visibility:private [final]' type=kotlin.String origin=null
+                  receiver: GET_VAR '<this>: foo.bar.NormalUser declared in foo.bar.NormalUser.equals' type=foo.bar.NormalUser origin=null
+                arg1: GET_FIELD 'FIELD PROPERTY_BACKING_FIELD name:name type:kotlin.String visibility:private [final]' type=kotlin.String origin=null
+                  receiver: GET_VAR 'val tmp_0: foo.bar.NormalUser declared in foo.bar.NormalUser.equals' type=foo.bar.NormalUser origin=null
+            then: RETURN type=kotlin.Nothing from='public open fun equals (other: kotlin.Any?): kotlin.Boolean declared in foo.bar.NormalUser'
+              CONST Boolean type=kotlin.Boolean value=false
+        WHEN type=kotlin.Unit origin=null
+          BRANCH
+            if: CALL 'public final fun not (): kotlin.Boolean declared in kotlin.Boolean' type=kotlin.Boolean origin=EXCLEQ
+              $this: CALL 'public final fun EQEQ (arg0: kotlin.Any?, arg1: kotlin.Any?): kotlin.Boolean declared in kotlin.internal.ir' type=kotlin.Boolean origin=EXCLEQ
+                arg0: GET_FIELD 'FIELD PROPERTY_BACKING_FIELD name:age type:kotlin.Int visibility:private [final]' type=kotlin.Int origin=null
+                  receiver: GET_VAR '<this>: foo.bar.NormalUser declared in foo.bar.NormalUser.equals' type=foo.bar.NormalUser origin=null
+                arg1: GET_FIELD 'FIELD PROPERTY_BACKING_FIELD name:age type:kotlin.Int visibility:private [final]' type=kotlin.Int origin=null
+                  receiver: GET_VAR 'val tmp_0: foo.bar.NormalUser declared in foo.bar.NormalUser.equals' type=foo.bar.NormalUser origin=null
+            then: RETURN type=kotlin.Nothing from='public open fun equals (other: kotlin.Any?): kotlin.Boolean declared in foo.bar.NormalUser'
+              CONST Boolean type=kotlin.Boolean value=false
+        RETURN type=kotlin.Nothing from='public open fun equals (other: kotlin.Any?): kotlin.Boolean declared in foo.bar.NormalUser'
+          CONST Boolean type=kotlin.Boolean value=true
+    FUN GENERATED_DATA_CLASS_MEMBER name:hashCode visibility:public modality:OPEN <> ($this:foo.bar.NormalUser) returnType:kotlin.Int
+      overridden:
+        public open fun hashCode (): kotlin.Int declared in kotlin.Any
+      $this: VALUE_PARAMETER name:<this> type:foo.bar.NormalUser
+      BLOCK_BODY
+        VAR name:result type:kotlin.Int [var]
+          CALL 'public open fun hashCode (): kotlin.Int declared in kotlin.String' type=kotlin.Int origin=null
+            $this: GET_FIELD 'FIELD PROPERTY_BACKING_FIELD name:name type:kotlin.String visibility:private [final]' type=kotlin.String origin=null
+              receiver: GET_VAR '<this>: foo.bar.NormalUser declared in foo.bar.NormalUser.hashCode' type=foo.bar.NormalUser origin=null
+        SET_VAR 'var result: kotlin.Int declared in foo.bar.NormalUser.hashCode' type=kotlin.Unit origin=EQ
+          CALL 'public final fun plus (other: kotlin.Int): kotlin.Int declared in kotlin.Int' type=kotlin.Int origin=null
+            $this: CALL 'public final fun times (other: kotlin.Int): kotlin.Int declared in kotlin.Int' type=kotlin.Int origin=null
+              $this: GET_VAR 'var result: kotlin.Int declared in foo.bar.NormalUser.hashCode' type=kotlin.Int origin=null
+              other: CONST Int type=kotlin.Int value=31
+            other: CALL 'public open fun hashCode (): kotlin.Int declared in kotlin.Int' type=kotlin.Int origin=null
+              $this: GET_FIELD 'FIELD PROPERTY_BACKING_FIELD name:age type:kotlin.Int visibility:private [final]' type=kotlin.Int origin=null
+                receiver: GET_VAR '<this>: foo.bar.NormalUser declared in foo.bar.NormalUser.hashCode' type=foo.bar.NormalUser origin=null
+        RETURN type=kotlin.Nothing from='public open fun hashCode (): kotlin.Int declared in foo.bar.NormalUser'
+          GET_VAR 'var result: kotlin.Int declared in foo.bar.NormalUser.hashCode' type=kotlin.Int origin=null
+    FUN GENERATED_DATA_CLASS_MEMBER name:toString visibility:public modality:OPEN <> ($this:foo.bar.NormalUser) returnType:kotlin.String
+      overridden:
+        public open fun toString (): kotlin.String declared in kotlin.Any
+      $this: VALUE_PARAMETER name:<this> type:foo.bar.NormalUser
+      BLOCK_BODY
+        RETURN type=kotlin.Nothing from='public open fun toString (): kotlin.String declared in foo.bar.NormalUser'
+          STRING_CONCATENATION type=kotlin.String
+            CONST String type=kotlin.String value="NormalUser("
+            CONST String type=kotlin.String value="name="
+            GET_FIELD 'FIELD PROPERTY_BACKING_FIELD name:name type:kotlin.String visibility:private [final]' type=kotlin.String origin=null
+              receiver: GET_VAR '<this>: foo.bar.NormalUser declared in foo.bar.NormalUser.toString' type=foo.bar.NormalUser origin=null
+            CONST String type=kotlin.String value=", "
+            CONST String type=kotlin.String value="age="
+            GET_FIELD 'FIELD PROPERTY_BACKING_FIELD name:age type:kotlin.Int visibility:private [final]' type=kotlin.Int origin=null
+              receiver: GET_VAR '<this>: foo.bar.NormalUser declared in foo.bar.NormalUser.toString' type=foo.bar.NormalUser origin=null
+            CONST String type=kotlin.String value=")"
+  CLASS CLASS name:User modality:FINAL visibility:public [data] superTypes:[kotlin.Any; foo.bar.Product; java.io.Serializable]
+    annotations:
+      Sparkify
+    $this: VALUE_PARAMETER INSTANCE_RECEIVER name:<this> type:foo.bar.User
+    PROPERTY name:name visibility:public modality:FINAL [val]
+      FIELD PROPERTY_BACKING_FIELD name:name type:kotlin.String visibility:private [final]
+        EXPRESSION_BODY
+          GET_VAR 'name: kotlin.String declared in foo.bar.User.<init>' type=kotlin.String origin=INITIALIZE_PROPERTY_FROM_PARAMETER
+      FUN DEFAULT_PROPERTY_ACCESSOR name:<get-name> visibility:public modality:FINAL <> ($this:foo.bar.User) returnType:kotlin.String
+        annotations:
+          JvmName(name = "name")
+        correspondingProperty: PROPERTY name:name visibility:public modality:FINAL [val]
+        $this: VALUE_PARAMETER name:<this> type:foo.bar.User
+        BLOCK_BODY
+          RETURN type=kotlin.Nothing from='public final fun <get-name> (): kotlin.String declared in foo.bar.User'
+            GET_FIELD 'FIELD PROPERTY_BACKING_FIELD name:name type:kotlin.String visibility:private [final]' type=kotlin.String origin=null
+              receiver: GET_VAR '<this>: foo.bar.User declared in foo.bar.User.<get-name>' type=foo.bar.User origin=null
+    PROPERTY name:age visibility:public modality:FINAL [val]
+      FIELD PROPERTY_BACKING_FIELD name:age type:kotlin.Int visibility:private [final]
+        EXPRESSION_BODY
+          GET_VAR 'age: kotlin.Int declared in foo.bar.User.<init>' type=kotlin.Int origin=INITIALIZE_PROPERTY_FROM_PARAMETER
+      FUN DEFAULT_PROPERTY_ACCESSOR name:<get-age> visibility:public modality:FINAL <> ($this:foo.bar.User) returnType:kotlin.Int
+        annotations:
+          JvmName(name = "age")
+        correspondingProperty: PROPERTY name:age visibility:public modality:FINAL [val]
+        $this: VALUE_PARAMETER name:<this> type:foo.bar.User
+        BLOCK_BODY
+          RETURN type=kotlin.Nothing from='public final fun <get-age> (): kotlin.Int declared in foo.bar.User'
+            GET_FIELD 'FIELD PROPERTY_BACKING_FIELD name:age type:kotlin.Int visibility:private [final]' type=kotlin.Int origin=null
+              receiver: GET_VAR '<this>: foo.bar.User declared in foo.bar.User.<get-age>' type=foo.bar.User origin=null
+    PROPERTY name:test visibility:public modality:FINAL [val]
+      FIELD PROPERTY_BACKING_FIELD name:test type:kotlin.Double visibility:private [final]
+        EXPRESSION_BODY
+          GET_VAR 'test: kotlin.Double declared in foo.bar.User.<init>' type=kotlin.Double origin=INITIALIZE_PROPERTY_FROM_PARAMETER
+      FUN DEFAULT_PROPERTY_ACCESSOR name:<get-test> visibility:public modality:FINAL <> ($this:foo.bar.User) returnType:kotlin.Double
+        annotations:
+          JvmName(name = "a")
+        correspondingProperty: PROPERTY name:test visibility:public modality:FINAL [val]
+        $this: VALUE_PARAMETER name:<this> type:foo.bar.User
+        BLOCK_BODY
+          RETURN type=kotlin.Nothing from='public final fun <get-test> (): kotlin.Double declared in foo.bar.User'
+            GET_FIELD 'FIELD PROPERTY_BACKING_FIELD name:test type:kotlin.Double visibility:private [final]' type=kotlin.Double origin=null
+              receiver: GET_VAR '<this>: foo.bar.User declared in foo.bar.User.<get-test>' type=foo.bar.User origin=null
+    PROPERTY name:test2 visibility:public modality:FINAL [val]
+      FIELD PROPERTY_BACKING_FIELD name:test2 type:kotlin.Double visibility:private [final]
+        EXPRESSION_BODY
+          GET_VAR 'test2: kotlin.Double declared in foo.bar.User.<init>' type=kotlin.Double origin=INITIALIZE_PROPERTY_FROM_PARAMETER
+      FUN DEFAULT_PROPERTY_ACCESSOR name:<get-test2> visibility:public modality:FINAL <> ($this:foo.bar.User) returnType:kotlin.Double
+        annotations:
+          ColumnName(name = "b")
+          JvmName(name = "b")
+        correspondingProperty: PROPERTY name:test2 visibility:public modality:FINAL [val]
+        $this: VALUE_PARAMETER name:<this> type:foo.bar.User
+        BLOCK_BODY
+          RETURN type=kotlin.Nothing from='public final fun <get-test2> (): kotlin.Double declared in foo.bar.User'
+            GET_FIELD 'FIELD PROPERTY_BACKING_FIELD name:test2 type:kotlin.Double visibility:private [final]' type=kotlin.Double origin=null
+              receiver: GET_VAR '<this>: foo.bar.User declared in foo.bar.User.<get-test2>' type=foo.bar.User origin=null
+    CONSTRUCTOR visibility:public <> (name:kotlin.String, age:kotlin.Int, test:kotlin.Double, test2:kotlin.Double) returnType:foo.bar.User [primary]
+      VALUE_PARAMETER name:name index:0 type:kotlin.String
+        EXPRESSION_BODY
+          CONST String type=kotlin.String value="John Doe"
+      VALUE_PARAMETER name:age index:1 type:kotlin.Int
+        EXPRESSION_BODY
+          CONST Int type=kotlin.Int value=25
+      VALUE_PARAMETER name:test index:2 type:kotlin.Double
+        annotations:
+          ColumnName(name = "a")
+        EXPRESSION_BODY
+          CONST Double type=kotlin.Double value=1.0
+      VALUE_PARAMETER name:test2 index:3 type:kotlin.Double
+        EXPRESSION_BODY
+          CONST Double type=kotlin.Double value=2.0
+      BLOCK_BODY
+        DELEGATING_CONSTRUCTOR_CALL 'public constructor <init> () declared in kotlin.Any'
+        INSTANCE_INITIALIZER_CALL classDescriptor='CLASS CLASS name:User modality:FINAL visibility:public [data] superTypes:[kotlin.Any; foo.bar.Product; java.io.Serializable]'
+    FUN GENERATED_DATA_CLASS_MEMBER name:component1 visibility:public modality:FINAL <> ($this:foo.bar.User) returnType:kotlin.String [operator]
+      $this: VALUE_PARAMETER name:<this> type:foo.bar.User
+      BLOCK_BODY
+        RETURN type=kotlin.Nothing from='public final fun component1 (): kotlin.String declared in foo.bar.User'
+          GET_FIELD 'FIELD PROPERTY_BACKING_FIELD name:name type:kotlin.String visibility:private [final]' type=kotlin.String origin=null
+            receiver: GET_VAR '<this>: foo.bar.User declared in foo.bar.User.component1' type=foo.bar.User origin=null
+    FUN GENERATED_DATA_CLASS_MEMBER name:component2 visibility:public modality:FINAL <> ($this:foo.bar.User) returnType:kotlin.Int [operator]
+      $this: VALUE_PARAMETER name:<this> type:foo.bar.User
+      BLOCK_BODY
+        RETURN type=kotlin.Nothing from='public final fun component2 (): kotlin.Int declared in foo.bar.User'
+          GET_FIELD 'FIELD PROPERTY_BACKING_FIELD name:age type:kotlin.Int visibility:private [final]' type=kotlin.Int origin=null
+            receiver: GET_VAR '<this>: foo.bar.User declared in foo.bar.User.component2' type=foo.bar.User origin=null
+    FUN GENERATED_DATA_CLASS_MEMBER name:component3 visibility:public modality:FINAL <> ($this:foo.bar.User) returnType:kotlin.Double [operator]
+      $this: VALUE_PARAMETER name:<this> type:foo.bar.User
+      BLOCK_BODY
+        RETURN type=kotlin.Nothing from='public final fun component3 (): kotlin.Double declared in foo.bar.User'
+          GET_FIELD 'FIELD PROPERTY_BACKING_FIELD name:test type:kotlin.Double visibility:private [final]' type=kotlin.Double origin=null
+            receiver: GET_VAR '<this>: foo.bar.User declared in foo.bar.User.component3' type=foo.bar.User origin=null
+    FUN GENERATED_DATA_CLASS_MEMBER name:component4 visibility:public modality:FINAL <> ($this:foo.bar.User) returnType:kotlin.Double [operator]
+      $this: VALUE_PARAMETER name:<this> type:foo.bar.User
+      BLOCK_BODY
+        RETURN type=kotlin.Nothing from='public final fun component4 (): kotlin.Double declared in foo.bar.User'
+          GET_FIELD 'FIELD PROPERTY_BACKING_FIELD name:test2 type:kotlin.Double visibility:private [final]' type=kotlin.Double origin=null
+            receiver: GET_VAR '<this>: foo.bar.User declared in foo.bar.User.component4' type=foo.bar.User origin=null
+    FUN GENERATED_DATA_CLASS_MEMBER name:copy visibility:public modality:FINAL <> ($this:foo.bar.User, name:kotlin.String, age:kotlin.Int, test:kotlin.Double, test2:kotlin.Double) returnType:foo.bar.User
+      $this: VALUE_PARAMETER name:<this> type:foo.bar.User
+      VALUE_PARAMETER name:name index:0 type:kotlin.String
+        EXPRESSION_BODY
+          GET_FIELD 'FIELD PROPERTY_BACKING_FIELD name:name type:kotlin.String visibility:private [final]' type=kotlin.String origin=null
+            receiver: GET_VAR '<this>: foo.bar.User declared in foo.bar.User.copy' type=foo.bar.User origin=null
+      VALUE_PARAMETER name:age index:1 type:kotlin.Int
+        EXPRESSION_BODY
+          GET_FIELD 'FIELD PROPERTY_BACKING_FIELD name:age type:kotlin.Int visibility:private [final]' type=kotlin.Int origin=null
+            receiver: GET_VAR '<this>: foo.bar.User declared in foo.bar.User.copy' type=foo.bar.User origin=null
+      VALUE_PARAMETER name:test index:2 type:kotlin.Double
+        annotations:
+          ColumnName(name = "a")
+        EXPRESSION_BODY
+          GET_FIELD 'FIELD PROPERTY_BACKING_FIELD name:test type:kotlin.Double visibility:private [final]' type=kotlin.Double origin=null
+            receiver: GET_VAR '<this>: foo.bar.User declared in foo.bar.User.copy' type=foo.bar.User origin=null
+      VALUE_PARAMETER name:test2 index:3 type:kotlin.Double
+        EXPRESSION_BODY
+          GET_FIELD 'FIELD PROPERTY_BACKING_FIELD name:test2 type:kotlin.Double visibility:private [final]' type=kotlin.Double origin=null
+            receiver: GET_VAR '<this>: foo.bar.User declared in foo.bar.User.copy' type=foo.bar.User origin=null
+      BLOCK_BODY
+        RETURN type=kotlin.Nothing from='public final fun copy (name: kotlin.String, age: kotlin.Int, test: kotlin.Double, test2: kotlin.Double): foo.bar.User declared in foo.bar.User'
+          CONSTRUCTOR_CALL 'public constructor <init> (name: kotlin.String, age: kotlin.Int, test: kotlin.Double, test2: kotlin.Double) declared in foo.bar.User' type=foo.bar.User origin=null
+            name: GET_VAR 'name: kotlin.String declared in foo.bar.User.copy' type=kotlin.String origin=null
+            age: GET_VAR 'age: kotlin.Int declared in foo.bar.User.copy' type=kotlin.Int origin=null
+            test: GET_VAR 'test: kotlin.Double declared in foo.bar.User.copy' type=kotlin.Double origin=null
+            test2: GET_VAR 'test2: kotlin.Double declared in foo.bar.User.copy' type=kotlin.Double origin=null
+    FUN GENERATED_DATA_CLASS_MEMBER name:equals visibility:public modality:OPEN <> ($this:foo.bar.User, other:kotlin.Any?) returnType:kotlin.Boolean [operator]
+      overridden:
+        public open fun equals (other: kotlin.Any?): kotlin.Boolean declared in kotlin.Any
+      $this: VALUE_PARAMETER name:<this> type:foo.bar.User
+      VALUE_PARAMETER name:other index:0 type:kotlin.Any?
+      BLOCK_BODY
+        WHEN type=kotlin.Unit origin=null
+          BRANCH
+            if: CALL 'public final fun EQEQEQ (arg0: kotlin.Any?, arg1: kotlin.Any?): kotlin.Boolean declared in kotlin.internal.ir' type=kotlin.Boolean origin=EQEQEQ
+              arg0: GET_VAR '<this>: foo.bar.User declared in foo.bar.User.equals' type=foo.bar.User origin=null
+              arg1: GET_VAR 'other: kotlin.Any? declared in foo.bar.User.equals' type=kotlin.Any? origin=null
+            then: RETURN type=kotlin.Nothing from='public open fun equals (other: kotlin.Any?): kotlin.Boolean declared in foo.bar.User'
+              CONST Boolean type=kotlin.Boolean value=true
+        WHEN type=kotlin.Unit origin=null
+          BRANCH
+            if: TYPE_OP type=kotlin.Boolean origin=NOT_INSTANCEOF typeOperand=foo.bar.User
+              GET_VAR 'other: kotlin.Any? declared in foo.bar.User.equals' type=kotlin.Any? origin=null
+            then: RETURN type=kotlin.Nothing from='public open fun equals (other: kotlin.Any?): kotlin.Boolean declared in foo.bar.User'
+              CONST Boolean type=kotlin.Boolean value=false
+        VAR IR_TEMPORARY_VARIABLE name:tmp_1 type:foo.bar.User [val]
+          TYPE_OP type=foo.bar.User origin=CAST typeOperand=foo.bar.User
+            GET_VAR 'other: kotlin.Any? declared in foo.bar.User.equals' type=kotlin.Any? origin=null
+        WHEN type=kotlin.Unit origin=null
+          BRANCH
+            if: CALL 'public final fun not (): kotlin.Boolean declared in kotlin.Boolean' type=kotlin.Boolean origin=EXCLEQ
+              $this: CALL 'public final fun EQEQ (arg0: kotlin.Any?, arg1: kotlin.Any?): kotlin.Boolean declared in kotlin.internal.ir' type=kotlin.Boolean origin=EXCLEQ
+                arg0: GET_FIELD 'FIELD PROPERTY_BACKING_FIELD name:name type:kotlin.String visibility:private [final]' type=kotlin.String origin=null
+                  receiver: GET_VAR '<this>: foo.bar.User declared in foo.bar.User.equals' type=foo.bar.User origin=null
+                arg1: GET_FIELD 'FIELD PROPERTY_BACKING_FIELD name:name type:kotlin.String visibility:private [final]' type=kotlin.String origin=null
+                  receiver: GET_VAR 'val tmp_1: foo.bar.User declared in foo.bar.User.equals' type=foo.bar.User origin=null
+            then: RETURN type=kotlin.Nothing from='public open fun equals (other: kotlin.Any?): kotlin.Boolean declared in foo.bar.User'
+              CONST Boolean type=kotlin.Boolean value=false
+        WHEN type=kotlin.Unit origin=null
+          BRANCH
+            if: CALL 'public final fun not (): kotlin.Boolean declared in kotlin.Boolean' type=kotlin.Boolean origin=EXCLEQ
+              $this: CALL 'public final fun EQEQ (arg0: kotlin.Any?, arg1: kotlin.Any?): kotlin.Boolean declared in kotlin.internal.ir' type=kotlin.Boolean origin=EXCLEQ
+                arg0: GET_FIELD 'FIELD PROPERTY_BACKING_FIELD name:age type:kotlin.Int visibility:private [final]' type=kotlin.Int origin=null
+                  receiver: GET_VAR '<this>: foo.bar.User declared in foo.bar.User.equals' type=foo.bar.User origin=null
+                arg1: GET_FIELD 'FIELD PROPERTY_BACKING_FIELD name:age type:kotlin.Int visibility:private [final]' type=kotlin.Int origin=null
+                  receiver: GET_VAR 'val tmp_1: foo.bar.User declared in foo.bar.User.equals' type=foo.bar.User origin=null
+            then: RETURN type=kotlin.Nothing from='public open fun equals (other: kotlin.Any?): kotlin.Boolean declared in foo.bar.User'
+              CONST Boolean type=kotlin.Boolean value=false
+        WHEN type=kotlin.Unit origin=null
+          BRANCH
+            if: CALL 'public final fun not (): kotlin.Boolean declared in kotlin.Boolean' type=kotlin.Boolean origin=EXCLEQ
+              $this: CALL 'public final fun EQEQ (arg0: kotlin.Any?, arg1: kotlin.Any?): kotlin.Boolean declared in kotlin.internal.ir' type=kotlin.Boolean origin=EXCLEQ
+                arg0: GET_FIELD 'FIELD PROPERTY_BACKING_FIELD name:test type:kotlin.Double visibility:private [final]' type=kotlin.Double origin=null
+                  receiver: GET_VAR '<this>: foo.bar.User declared in foo.bar.User.equals' type=foo.bar.User origin=null
+                arg1: GET_FIELD 'FIELD PROPERTY_BACKING_FIELD name:test type:kotlin.Double visibility:private [final]' type=kotlin.Double origin=null
+                  receiver: GET_VAR 'val tmp_1: foo.bar.User declared in foo.bar.User.equals' type=foo.bar.User origin=null
+            then: RETURN type=kotlin.Nothing from='public open fun equals (other: kotlin.Any?): kotlin.Boolean declared in foo.bar.User'
+              CONST Boolean type=kotlin.Boolean value=false
+        WHEN type=kotlin.Unit origin=null
+          BRANCH
+            if: CALL 'public final fun not (): kotlin.Boolean declared in kotlin.Boolean' type=kotlin.Boolean origin=EXCLEQ
+              $this: CALL 'public final fun EQEQ (arg0: kotlin.Any?, arg1: kotlin.Any?): kotlin.Boolean declared in kotlin.internal.ir' type=kotlin.Boolean origin=EXCLEQ
+                arg0: GET_FIELD 'FIELD PROPERTY_BACKING_FIELD name:test2 type:kotlin.Double visibility:private [final]' type=kotlin.Double origin=null
+                  receiver: GET_VAR '<this>: foo.bar.User declared in foo.bar.User.equals' type=foo.bar.User origin=null
+                arg1: GET_FIELD 'FIELD PROPERTY_BACKING_FIELD name:test2 type:kotlin.Double visibility:private [final]' type=kotlin.Double origin=null
+                  receiver: GET_VAR 'val tmp_1: foo.bar.User declared in foo.bar.User.equals' type=foo.bar.User origin=null
+            then: RETURN type=kotlin.Nothing from='public open fun equals (other: kotlin.Any?): kotlin.Boolean declared in foo.bar.User'
+              CONST Boolean type=kotlin.Boolean value=false
+        RETURN type=kotlin.Nothing from='public open fun equals (other: kotlin.Any?): kotlin.Boolean declared in foo.bar.User'
+          CONST Boolean type=kotlin.Boolean value=true
+    FUN GENERATED_DATA_CLASS_MEMBER name:hashCode visibility:public modality:OPEN <> ($this:foo.bar.User) returnType:kotlin.Int
+      overridden:
+        public open fun hashCode (): kotlin.Int declared in kotlin.Any
+      $this: VALUE_PARAMETER name:<this> type:foo.bar.User
+      BLOCK_BODY
+        VAR name:result type:kotlin.Int [var]
+          CALL 'public open fun hashCode (): kotlin.Int declared in kotlin.String' type=kotlin.Int origin=null
+            $this: GET_FIELD 'FIELD PROPERTY_BACKING_FIELD name:name type:kotlin.String visibility:private [final]' type=kotlin.String origin=null
+              receiver: GET_VAR '<this>: foo.bar.User declared in foo.bar.User.hashCode' type=foo.bar.User origin=null
+        SET_VAR 'var result: kotlin.Int declared in foo.bar.User.hashCode' type=kotlin.Unit origin=EQ
+          CALL 'public final fun plus (other: kotlin.Int): kotlin.Int declared in kotlin.Int' type=kotlin.Int origin=null
+            $this: CALL 'public final fun times (other: kotlin.Int): kotlin.Int declared in kotlin.Int' type=kotlin.Int origin=null
+              $this: GET_VAR 'var result: kotlin.Int declared in foo.bar.User.hashCode' type=kotlin.Int origin=null
+              other: CONST Int type=kotlin.Int value=31
+            other: CALL 'public open fun hashCode (): kotlin.Int declared in kotlin.Int' type=kotlin.Int origin=null
+              $this: GET_FIELD 'FIELD PROPERTY_BACKING_FIELD name:age type:kotlin.Int visibility:private [final]' type=kotlin.Int origin=null
+                receiver: GET_VAR '<this>: foo.bar.User declared in foo.bar.User.hashCode' type=foo.bar.User origin=null
+        SET_VAR 'var result: kotlin.Int declared in foo.bar.User.hashCode' type=kotlin.Unit origin=EQ
+          CALL 'public final fun plus (other: kotlin.Int): kotlin.Int declared in kotlin.Int' type=kotlin.Int origin=null
+            $this: CALL 'public final fun times (other: kotlin.Int): kotlin.Int declared in kotlin.Int' type=kotlin.Int origin=null
+              $this: GET_VAR 'var result: kotlin.Int declared in foo.bar.User.hashCode' type=kotlin.Int origin=null
+              other: CONST Int type=kotlin.Int value=31
+            other: CALL 'public open fun hashCode (): kotlin.Int declared in kotlin.Double' type=kotlin.Int origin=null
+              $this: GET_FIELD 'FIELD PROPERTY_BACKING_FIELD name:test type:kotlin.Double visibility:private [final]' type=kotlin.Double origin=null
+                receiver: GET_VAR '<this>: foo.bar.User declared in foo.bar.User.hashCode' type=foo.bar.User origin=null
+        SET_VAR 'var result: kotlin.Int declared in foo.bar.User.hashCode' type=kotlin.Unit origin=EQ
+          CALL 'public final fun plus (other: kotlin.Int): kotlin.Int declared in kotlin.Int' type=kotlin.Int origin=null
+            $this: CALL 'public final fun times (other: kotlin.Int): kotlin.Int declared in kotlin.Int' type=kotlin.Int origin=null
+              $this: GET_VAR 'var result: kotlin.Int declared in foo.bar.User.hashCode' type=kotlin.Int origin=null
+              other: CONST Int type=kotlin.Int value=31
+            other: CALL 'public open fun hashCode (): kotlin.Int declared in kotlin.Double' type=kotlin.Int origin=null
+              $this: GET_FIELD 'FIELD PROPERTY_BACKING_FIELD name:test2 type:kotlin.Double visibility:private [final]' type=kotlin.Double origin=null
+                receiver: GET_VAR '<this>: foo.bar.User declared in foo.bar.User.hashCode' type=foo.bar.User origin=null
+        RETURN type=kotlin.Nothing from='public open fun hashCode (): kotlin.Int declared in foo.bar.User'
+          GET_VAR 'var result: kotlin.Int declared in foo.bar.User.hashCode' type=kotlin.Int origin=null
+    FUN GENERATED_DATA_CLASS_MEMBER name:toString visibility:public modality:OPEN <> ($this:foo.bar.User) returnType:kotlin.String
+      overridden:
+        public open fun toString (): kotlin.String declared in kotlin.Any
+      $this: VALUE_PARAMETER name:<this> type:foo.bar.User
+      BLOCK_BODY
+        RETURN type=kotlin.Nothing from='public open fun toString (): kotlin.String declared in foo.bar.User'
+          STRING_CONCATENATION type=kotlin.String
+            CONST String type=kotlin.String value="User("
+            CONST String type=kotlin.String value="name="
+            GET_FIELD 'FIELD PROPERTY_BACKING_FIELD name:name type:kotlin.String visibility:private [final]' type=kotlin.String origin=null
+              receiver: GET_VAR '<this>: foo.bar.User declared in foo.bar.User.toString' type=foo.bar.User origin=null
+            CONST String type=kotlin.String value=", "
+            CONST String type=kotlin.String value="age="
+            GET_FIELD 'FIELD PROPERTY_BACKING_FIELD name:age type:kotlin.Int visibility:private [final]' type=kotlin.Int origin=null
+              receiver: GET_VAR '<this>: foo.bar.User declared in foo.bar.User.toString' type=foo.bar.User origin=null
+            CONST String type=kotlin.String value=", "
+            CONST String type=kotlin.String value="test="
+            GET_FIELD 'FIELD PROPERTY_BACKING_FIELD name:test type:kotlin.Double visibility:private [final]' type=kotlin.Double origin=null
+              receiver: GET_VAR '<this>: foo.bar.User declared in foo.bar.User.toString' type=foo.bar.User origin=null
+            CONST String type=kotlin.String value=", "
+            CONST String type=kotlin.String value="test2="
+            GET_FIELD 'FIELD PROPERTY_BACKING_FIELD name:test2 type:kotlin.Double visibility:private [final]' type=kotlin.Double origin=null
+              receiver: GET_VAR '<this>: foo.bar.User declared in foo.bar.User.toString' type=foo.bar.User origin=null
+            CONST String type=kotlin.String value=")"
+    FUN name:canEqual visibility:public modality:OPEN <> ($this:foo.bar.User, that:kotlin.Any?) returnType:kotlin.Boolean
+      overridden:
+        public abstract fun canEqual (that: kotlin.Any?): kotlin.Boolean declared in foo.bar.Equals
+      $this: VALUE_PARAMETER name:$this type:foo.bar.User
+      VALUE_PARAMETER name:that index:0 type:kotlin.Any?
+      BLOCK_BODY
+        RETURN type=kotlin.Nothing from='public open fun canEqual (that: kotlin.Any?): kotlin.Boolean declared in foo.bar.User'
+          TYPE_OP type=kotlin.Boolean origin=INSTANCEOF typeOperand=foo.bar.User
+            GET_VAR 'that: kotlin.Any? declared in foo.bar.User.canEqual' type=kotlin.Any? origin=null
+    FUN name:productArity visibility:public modality:OPEN <> ($this:foo.bar.User) returnType:kotlin.Int
+      overridden:
+        public abstract fun productArity (): kotlin.Int declared in foo.bar.Product
+      $this: VALUE_PARAMETER name:$this type:foo.bar.User
+      BLOCK_BODY
+        RETURN type=kotlin.Nothing from='public open fun productArity (): kotlin.Int declared in foo.bar.User'
+          CONST Int type=kotlin.Int value=4
+    FUN name:productElement visibility:public modality:OPEN <> ($this:foo.bar.User, n:kotlin.Int) returnType:kotlin.Any?
+      overridden:
+        public abstract fun productElement (n: kotlin.Int): kotlin.Any declared in foo.bar.Product
+      $this: VALUE_PARAMETER name:$this type:foo.bar.User
+      VALUE_PARAMETER name:n index:0 type:kotlin.Int
+      BLOCK_BODY
+        RETURN type=kotlin.Nothing from='public open fun productElement (n: kotlin.Int): kotlin.Any? declared in foo.bar.User'
+          WHEN type=kotlin.Any? origin=IF
+            BRANCH
+              if: CALL 'public final fun EQEQ (arg0: kotlin.Any?, arg1: kotlin.Any?): kotlin.Boolean declared in kotlin.internal.ir' type=kotlin.Boolean origin=EQEQ
+                arg0: GET_VAR 'n: kotlin.Int declared in foo.bar.User.productElement' type=kotlin.Int origin=null
+                arg1: CONST Int type=kotlin.Int value=0
+              then: CALL 'public final fun <get-name> (): kotlin.String declared in foo.bar.User' type=kotlin.String origin=GET_PROPERTY
+                $this: GET_VAR '$this: foo.bar.User declared in foo.bar.User.productElement' type=foo.bar.User origin=null
+            BRANCH
+              if: CALL 'public final fun EQEQ (arg0: kotlin.Any?, arg1: kotlin.Any?): kotlin.Boolean declared in kotlin.internal.ir' type=kotlin.Boolean origin=EQEQ
+                arg0: GET_VAR 'n: kotlin.Int declared in foo.bar.User.productElement' type=kotlin.Int origin=null
+                arg1: CONST Int type=kotlin.Int value=1
+              then: CALL 'public final fun <get-age> (): kotlin.Int declared in foo.bar.User' type=kotlin.Int origin=GET_PROPERTY
+                $this: GET_VAR '$this: foo.bar.User declared in foo.bar.User.productElement' type=foo.bar.User origin=null
+            BRANCH
+              if: CALL 'public final fun EQEQ (arg0: kotlin.Any?, arg1: kotlin.Any?): kotlin.Boolean declared in kotlin.internal.ir' type=kotlin.Boolean origin=EQEQ
+                arg0: GET_VAR 'n: kotlin.Int declared in foo.bar.User.productElement' type=kotlin.Int origin=null
+                arg1: CONST Int type=kotlin.Int value=2
+              then: CALL 'public final fun <get-test> (): kotlin.Double declared in foo.bar.User' type=kotlin.Double origin=GET_PROPERTY
+                $this: GET_VAR '$this: foo.bar.User declared in foo.bar.User.productElement' type=foo.bar.User origin=null
+            BRANCH
+              if: CALL 'public final fun EQEQ (arg0: kotlin.Any?, arg1: kotlin.Any?): kotlin.Boolean declared in kotlin.internal.ir' type=kotlin.Boolean origin=EQEQ
+                arg0: GET_VAR 'n: kotlin.Int declared in foo.bar.User.productElement' type=kotlin.Int origin=null
+                arg1: CONST Int type=kotlin.Int value=3
+              then: CALL 'public final fun <get-test2> (): kotlin.Double declared in foo.bar.User' type=kotlin.Double origin=GET_PROPERTY
+                $this: GET_VAR '$this: foo.bar.User declared in foo.bar.User.productElement' type=foo.bar.User origin=null
+            BRANCH
+              if: CONST Boolean type=kotlin.Boolean value=true
+              then: THROW type=kotlin.Nothing
+                CONSTRUCTOR_CALL 'public constructor <init> () declared in java.lang.IndexOutOfBoundsException' type=java.lang.IndexOutOfBoundsException origin=null
+  CLASS INTERFACE name:Equals modality:ABSTRACT visibility:public superTypes:[kotlin.Any]
+    $this: VALUE_PARAMETER INSTANCE_RECEIVER name:<this> type:foo.bar.Equals
+    FUN FAKE_OVERRIDE name:equals visibility:public modality:OPEN <> ($this:kotlin.Any, other:kotlin.Any?) returnType:kotlin.Boolean [fake_override,operator]
+      overridden:
+        public open fun equals (other: kotlin.Any?): kotlin.Boolean declared in kotlin.Any
+      $this: VALUE_PARAMETER name:<this> type:kotlin.Any
+      VALUE_PARAMETER name:other index:0 type:kotlin.Any?
+    FUN FAKE_OVERRIDE name:hashCode visibility:public modality:OPEN <> ($this:kotlin.Any) returnType:kotlin.Int [fake_override]
+      overridden:
+        public open fun hashCode (): kotlin.Int declared in kotlin.Any
+      $this: VALUE_PARAMETER name:<this> type:kotlin.Any
+    FUN FAKE_OVERRIDE name:toString visibility:public modality:OPEN <> ($this:kotlin.Any) returnType:kotlin.String [fake_override]
+      overridden:
+        public open fun toString (): kotlin.String declared in kotlin.Any
+      $this: VALUE_PARAMETER name:<this> type:kotlin.Any
+    FUN name:canEqual visibility:public modality:ABSTRACT <> ($this:foo.bar.Equals, that:kotlin.Any?) returnType:kotlin.Boolean
+      $this: VALUE_PARAMETER name:<this> type:foo.bar.Equals
+      VALUE_PARAMETER name:that index:0 type:kotlin.Any?
+  CLASS INTERFACE name:Product modality:ABSTRACT visibility:public superTypes:[foo.bar.Equals]
+    $this: VALUE_PARAMETER INSTANCE_RECEIVER name:<this> type:foo.bar.Product
+    FUN FAKE_OVERRIDE name:canEqual visibility:public modality:ABSTRACT <> ($this:foo.bar.Equals, that:kotlin.Any?) returnType:kotlin.Boolean [fake_override]
+      overridden:
+        public abstract fun canEqual (that: kotlin.Any?): kotlin.Boolean declared in foo.bar.Equals
+      $this: VALUE_PARAMETER name:<this> type:foo.bar.Equals
+      VALUE_PARAMETER name:that index:0 type:kotlin.Any?
+    FUN FAKE_OVERRIDE name:equals visibility:public modality:OPEN <> ($this:kotlin.Any, other:kotlin.Any?) returnType:kotlin.Boolean [fake_override,operator]
+      overridden:
+        public open fun equals (other: kotlin.Any?): kotlin.Boolean declared in foo.bar.Equals
+      $this: VALUE_PARAMETER name:<this> type:kotlin.Any
+      VALUE_PARAMETER name:other index:0 type:kotlin.Any?
+    FUN FAKE_OVERRIDE name:hashCode visibility:public modality:OPEN <> ($this:kotlin.Any) returnType:kotlin.Int [fake_override]
+      overridden:
+        public open fun hashCode (): kotlin.Int declared in foo.bar.Equals
+      $this: VALUE_PARAMETER name:<this> type:kotlin.Any
+    FUN FAKE_OVERRIDE name:toString visibility:public modality:OPEN <> ($this:kotlin.Any) returnType:kotlin.String [fake_override]
+      overridden:
+        public open fun toString (): kotlin.String declared in foo.bar.Equals
+      $this: VALUE_PARAMETER name:<this> type:kotlin.Any
+    FUN name:productArity visibility:public modality:ABSTRACT <> ($this:foo.bar.Product) returnType:kotlin.Int
+      $this: VALUE_PARAMETER name:<this> type:foo.bar.Product
+    FUN name:productElement visibility:public modality:ABSTRACT <> ($this:foo.bar.Product, n:kotlin.Int) returnType:kotlin.Any
+      $this: VALUE_PARAMETER name:<this> type:foo.bar.Product
+      VALUE_PARAMETER name:n index:0 type:kotlin.Int
+  FUN name:box visibility:public modality:FINAL <> () returnType:kotlin.String
+    BLOCK_BODY
+      VAR name:user type:foo.bar.User [val]
+        CONSTRUCTOR_CALL 'public constructor <init> (name: kotlin.String, age: kotlin.Int, test: kotlin.Double, test2: kotlin.Double) declared in foo.bar.User' type=foo.bar.User origin=null
+      VAR name:name type:@[FlexibleNullability] kotlin.Any? [val]
+        CALL 'public open fun invoke (p0: @[FlexibleNullability] kotlin.Any?, vararg p1: @[FlexibleNullability] kotlin.Any?): @[FlexibleNullability] kotlin.Any? declared in java.lang.reflect.Method' type=@[FlexibleNullability] kotlin.Any? origin=null
+          $this: CALL 'public open fun getMethod (p0: @[FlexibleNullability] kotlin.String?, vararg p1: @[FlexibleNullability] java.lang.Class<*>?): @[FlexibleNullability] java.lang.reflect.Method? declared in java.lang.Class' type=@[FlexibleNullability] java.lang.reflect.Method? origin=null
+            $this: CALL 'public final fun <get-java> <T> (): java.lang.Class<T of kotlin.jvm.<get-java>> declared in kotlin.jvm' type=java.lang.Class<foo.bar.User> origin=GET_PROPERTY
+              <T>: foo.bar.User
+              $receiver: CLASS_REFERENCE 'CLASS CLASS name:User modality:FINAL visibility:public [data] superTypes:[kotlin.Any; foo.bar.Product; java.io.Serializable]' type=kotlin.reflect.KClass<foo.bar.User>
+            p0: CONST String type=kotlin.String value="name"
+          p0: GET_VAR 'val user: foo.bar.User declared in foo.bar.box' type=foo.bar.User origin=null
+      VAR name:age type:@[FlexibleNullability] kotlin.Any? [val]
+        CALL 'public open fun invoke (p0: @[FlexibleNullability] kotlin.Any?, vararg p1: @[FlexibleNullability] kotlin.Any?): @[FlexibleNullability] kotlin.Any? declared in java.lang.reflect.Method' type=@[FlexibleNullability] kotlin.Any? origin=null
+          $this: CALL 'public open fun getMethod (p0: @[FlexibleNullability] kotlin.String?, vararg p1: @[FlexibleNullability] java.lang.Class<*>?): @[FlexibleNullability] java.lang.reflect.Method? declared in java.lang.Class' type=@[FlexibleNullability] java.lang.reflect.Method? origin=null
+            $this: CALL 'public final fun <get-java> <T> (): java.lang.Class<T of kotlin.jvm.<get-java>> declared in kotlin.jvm' type=java.lang.Class<foo.bar.User> origin=GET_PROPERTY
+              <T>: foo.bar.User
+              $receiver: CLASS_REFERENCE 'CLASS CLASS name:User modality:FINAL visibility:public [data] superTypes:[kotlin.Any; foo.bar.Product; java.io.Serializable]' type=kotlin.reflect.KClass<foo.bar.User>
+            p0: CONST String type=kotlin.String value="age"
+          p0: GET_VAR 'val user: foo.bar.User declared in foo.bar.box' type=foo.bar.User origin=null
+      VAR name:a type:@[FlexibleNullability] kotlin.Any? [val]
+        CALL 'public open fun invoke (p0: @[FlexibleNullability] kotlin.Any?, vararg p1: @[FlexibleNullability] kotlin.Any?): @[FlexibleNullability] kotlin.Any? declared in java.lang.reflect.Method' type=@[FlexibleNullability] kotlin.Any? origin=null
+          $this: CALL 'public open fun getMethod (p0: @[FlexibleNullability] kotlin.String?, vararg p1: @[FlexibleNullability] java.lang.Class<*>?): @[FlexibleNullability] java.lang.reflect.Method? declared in java.lang.Class' type=@[FlexibleNullability] java.lang.reflect.Method? origin=null
+            $this: CALL 'public final fun <get-java> <T> (): java.lang.Class<T of kotlin.jvm.<get-java>> declared in kotlin.jvm' type=java.lang.Class<foo.bar.User> origin=GET_PROPERTY
+              <T>: foo.bar.User
+              $receiver: CLASS_REFERENCE 'CLASS CLASS name:User modality:FINAL visibility:public [data] superTypes:[kotlin.Any; foo.bar.Product; java.io.Serializable]' type=kotlin.reflect.KClass<foo.bar.User>
+            p0: CONST String type=kotlin.String value="a"
+          p0: GET_VAR 'val user: foo.bar.User declared in foo.bar.box' type=foo.bar.User origin=null
+      VAR name:b type:@[FlexibleNullability] kotlin.Any? [val]
+        CALL 'public open fun invoke (p0: @[FlexibleNullability] kotlin.Any?, vararg p1: @[FlexibleNullability] kotlin.Any?): @[FlexibleNullability] kotlin.Any? declared in java.lang.reflect.Method' type=@[FlexibleNullability] kotlin.Any? origin=null
+          $this: CALL 'public open fun getMethod (p0: @[FlexibleNullability] kotlin.String?, vararg p1: @[FlexibleNullability] java.lang.Class<*>?): @[FlexibleNullability] java.lang.reflect.Method? declared in java.lang.Class' type=@[FlexibleNullability] java.lang.reflect.Method? origin=null
+            $this: CALL 'public final fun <get-java> <T> (): java.lang.Class<T of kotlin.jvm.<get-java>> declared in kotlin.jvm' type=java.lang.Class<foo.bar.User> origin=GET_PROPERTY
+              <T>: foo.bar.User
+              $receiver: CLASS_REFERENCE 'CLASS CLASS name:User modality:FINAL visibility:public [data] superTypes:[kotlin.Any; foo.bar.Product; java.io.Serializable]' type=kotlin.reflect.KClass<foo.bar.User>
+            p0: CONST String type=kotlin.String value="b"
+          p0: GET_VAR 'val user: foo.bar.User declared in foo.bar.box' type=foo.bar.User origin=null
+      WHEN type=kotlin.Unit origin=IF
+        BRANCH
+          if: WHEN type=kotlin.Boolean origin=OROR
+            BRANCH
+              if: WHEN type=kotlin.Boolean origin=OROR
+                BRANCH
+                  if: WHEN type=kotlin.Boolean origin=OROR
+                    BRANCH
+                      if: CALL 'public final fun not (): kotlin.Boolean declared in kotlin.Boolean' type=kotlin.Boolean origin=EXCLEQ
+                        $this: CALL 'public final fun EQEQ (arg0: kotlin.Any?, arg1: kotlin.Any?): kotlin.Boolean declared in kotlin.internal.ir' type=kotlin.Boolean origin=EXCLEQ
+                          arg0: GET_VAR 'val name: @[FlexibleNullability] kotlin.Any? declared in foo.bar.box' type=@[FlexibleNullability] kotlin.Any? origin=null
+                          arg1: CONST String type=kotlin.String value="John Doe"
+                      then: CONST Boolean type=kotlin.Boolean value=true
+                    BRANCH
+                      if: CONST Boolean type=kotlin.Boolean value=true
+                      then: CALL 'public final fun not (): kotlin.Boolean declared in kotlin.Boolean' type=kotlin.Boolean origin=EXCLEQ
+                        $this: CALL 'public final fun EQEQ (arg0: kotlin.Any?, arg1: kotlin.Any?): kotlin.Boolean declared in kotlin.internal.ir' type=kotlin.Boolean origin=EXCLEQ
+                          arg0: GET_VAR 'val age: @[FlexibleNullability] kotlin.Any? declared in foo.bar.box' type=@[FlexibleNullability] kotlin.Any? origin=null
+                          arg1: CONST Int type=kotlin.Int value=25
+                  then: CONST Boolean type=kotlin.Boolean value=true
+                BRANCH
+                  if: CONST Boolean type=kotlin.Boolean value=true
+                  then: CALL 'public final fun not (): kotlin.Boolean declared in kotlin.Boolean' type=kotlin.Boolean origin=EXCLEQ
+                    $this: CALL 'public final fun EQEQ (arg0: kotlin.Any?, arg1: kotlin.Any?): kotlin.Boolean declared in kotlin.internal.ir' type=kotlin.Boolean origin=EXCLEQ
+                      arg0: GET_VAR 'val a: @[FlexibleNullability] kotlin.Any? declared in foo.bar.box' type=@[FlexibleNullability] kotlin.Any? origin=null
+                      arg1: CONST Double type=kotlin.Double value=1.0
+              then: CONST Boolean type=kotlin.Boolean value=true
+            BRANCH
+              if: CONST Boolean type=kotlin.Boolean value=true
+              then: CALL 'public final fun not (): kotlin.Boolean declared in kotlin.Boolean' type=kotlin.Boolean origin=EXCLEQ
+                $this: CALL 'public final fun EQEQ (arg0: kotlin.Any?, arg1: kotlin.Any?): kotlin.Boolean declared in kotlin.internal.ir' type=kotlin.Boolean origin=EXCLEQ
+                  arg0: GET_VAR 'val b: @[FlexibleNullability] kotlin.Any? declared in foo.bar.box' type=@[FlexibleNullability] kotlin.Any? origin=null
+                  arg1: CONST Double type=kotlin.Double value=2.0
+          then: BLOCK type=kotlin.Unit origin=null
+            RETURN type=kotlin.Nothing from='public final fun box (): kotlin.String declared in foo.bar'
+              CONST String type=kotlin.String value="Could not invoke functions name(), age(), a(), or b() from Java"
+      TRY type=kotlin.Unit
+        try: BLOCK type=kotlin.Unit origin=null
+          VAR name:normalUser type:foo.bar.NormalUser [val]
+            CONSTRUCTOR_CALL 'public constructor <init> (name: kotlin.String, age: kotlin.Int) declared in foo.bar.NormalUser' type=foo.bar.NormalUser origin=null
+          VAR name:name type:@[FlexibleNullability] kotlin.Any? [val]
+            CALL 'public open fun invoke (p0: @[FlexibleNullability] kotlin.Any?, vararg p1: @[FlexibleNullability] kotlin.Any?): @[FlexibleNullability] kotlin.Any? declared in java.lang.reflect.Method' type=@[FlexibleNullability] kotlin.Any? origin=null
+              $this: CALL 'public open fun getMethod (p0: @[FlexibleNullability] kotlin.String?, vararg p1: @[FlexibleNullability] java.lang.Class<*>?): @[FlexibleNullability] java.lang.reflect.Method? declared in java.lang.Class' type=@[FlexibleNullability] java.lang.reflect.Method? origin=null
+                $this: CALL 'public final fun <get-java> <T> (): java.lang.Class<T of kotlin.jvm.<get-java>> declared in kotlin.jvm' type=java.lang.Class<foo.bar.NormalUser> origin=GET_PROPERTY
+                  <T>: foo.bar.NormalUser
+                  $receiver: CLASS_REFERENCE 'CLASS CLASS name:NormalUser modality:FINAL visibility:public [data] superTypes:[kotlin.Any]' type=kotlin.reflect.KClass<foo.bar.NormalUser>
+                p0: CONST String type=kotlin.String value="name"
+              p0: GET_VAR 'val user: foo.bar.User declared in foo.bar.box' type=foo.bar.User origin=null
+          VAR name:age type:@[FlexibleNullability] kotlin.Any? [val]
+            CALL 'public open fun invoke (p0: @[FlexibleNullability] kotlin.Any?, vararg p1: @[FlexibleNullability] kotlin.Any?): @[FlexibleNullability] kotlin.Any? declared in java.lang.reflect.Method' type=@[FlexibleNullability] kotlin.Any? origin=null
+              $this: CALL 'public open fun getMethod (p0: @[FlexibleNullability] kotlin.String?, vararg p1: @[FlexibleNullability] java.lang.Class<*>?): @[FlexibleNullability] java.lang.reflect.Method? declared in java.lang.Class' type=@[FlexibleNullability] java.lang.reflect.Method? origin=null
+                $this: CALL 'public final fun <get-java> <T> (): java.lang.Class<T of kotlin.jvm.<get-java>> declared in kotlin.jvm' type=java.lang.Class<foo.bar.NormalUser> origin=GET_PROPERTY
+                  <T>: foo.bar.NormalUser
+                  $receiver: CLASS_REFERENCE 'CLASS CLASS name:NormalUser modality:FINAL visibility:public [data] superTypes:[kotlin.Any]' type=kotlin.reflect.KClass<foo.bar.NormalUser>
+                p0: CONST String type=kotlin.String value="age"
+              p0: GET_VAR 'val user: foo.bar.User declared in foo.bar.box' type=foo.bar.User origin=null
+        CATCH parameter=val e: java.lang.Exception declared in foo.bar.box
+          VAR CATCH_PARAMETER name:e type:java.lang.Exception [val]
+          BLOCK type=kotlin.Nothing origin=null
+            RETURN type=kotlin.Nothing from='public final fun box (): kotlin.String declared in foo.bar'
+              CONST String type=kotlin.String value="OK"
+      RETURN type=kotlin.Nothing from='public final fun box (): kotlin.String declared in foo.bar'
+        CONST String type=kotlin.String value="Fail"
diff --git a/compiler-plugin/src/test/resources/testData/box/dataClassTest.fir.txt b/compiler-plugin/src/test/resources/testData/box/dataClassTest.fir.txt
new file mode 100644
index 00000000..7a880636
--- /dev/null
+++ b/compiler-plugin/src/test/resources/testData/box/dataClassTest.fir.txt
@@ -0,0 +1,97 @@
+FILE: dataClassTest.kt
+    package foo.bar
+
+    public final annotation class Sparkify : R|kotlin/Annotation| {
+        public constructor(): R|foo/bar/Sparkify| {
+            super<R|kotlin/Any|>()
+        }
+
+    }
+    public final annotation class ColumnName : R|kotlin/Annotation| {
+        public constructor(name: R|kotlin/String|): R|foo/bar/ColumnName| {
+            super<R|kotlin/Any|>()
+        }
+
+        public final val name: R|kotlin/String| = R|<local>/name|
+            public get(): R|kotlin/String|
+
+    }
+    public abstract interface Equals : R|kotlin/Any| {
+        public abstract fun canEqual(that: R|kotlin/Any?|): R|kotlin/Boolean|
+
+    }
+    public abstract interface Product : R|foo/bar/Equals| {
+        public abstract fun productElement(n: R|kotlin/Int|): R|kotlin/Any|
+
+        public abstract fun productArity(): R|kotlin/Int|
+
+    }
+    public final fun box(): R|kotlin/String| {
+        lval user: R|foo/bar/User| = R|foo/bar/User.User|()
+        lval name: R|kotlin/Any!| = <getClass>(Q|foo/bar/User|).R|kotlin/jvm/java|<R|foo/bar/User|>.R|SubstitutionOverride<java/lang/Class.getMethod: R|java/lang/reflect/Method!|>|(String(name)).R|java/lang/reflect/Method.invoke|(R|<local>/user|)
+        lval age: R|kotlin/Any!| = <getClass>(Q|foo/bar/User|).R|kotlin/jvm/java|<R|foo/bar/User|>.R|SubstitutionOverride<java/lang/Class.getMethod: R|java/lang/reflect/Method!|>|(String(age)).R|java/lang/reflect/Method.invoke|(R|<local>/user|)
+        lval a: R|kotlin/Any!| = <getClass>(Q|foo/bar/User|).R|kotlin/jvm/java|<R|foo/bar/User|>.R|SubstitutionOverride<java/lang/Class.getMethod: R|java/lang/reflect/Method!|>|(String(a)).R|java/lang/reflect/Method.invoke|(R|<local>/user|)
+        lval b: R|kotlin/Any!| = <getClass>(Q|foo/bar/User|).R|kotlin/jvm/java|<R|foo/bar/User|>.R|SubstitutionOverride<java/lang/Class.getMethod: R|java/lang/reflect/Method!|>|(String(b)).R|java/lang/reflect/Method.invoke|(R|<local>/user|)
+        when () {
+            !=(R|<local>/name|, String(John Doe)) || !=(R|<local>/age|, Int(25)) || !=(R|<local>/a|, Double(1.0)) || !=(R|<local>/b|, Double(2.0)) ->  {
+                ^box String(Could not invoke functions name(), age(), a(), or b() from Java)
+            }
+        }
+
+        try {
+            lval normalUser: R|foo/bar/NormalUser| = R|foo/bar/NormalUser.NormalUser|()
+            lval name: R|kotlin/Any!| = <getClass>(Q|foo/bar/NormalUser|).R|kotlin/jvm/java|<R|foo/bar/NormalUser|>.R|SubstitutionOverride<java/lang/Class.getMethod: R|java/lang/reflect/Method!|>|(String(name)).R|java/lang/reflect/Method.invoke|(R|<local>/user|)
+            lval age: R|kotlin/Any!| = <getClass>(Q|foo/bar/NormalUser|).R|kotlin/jvm/java|<R|foo/bar/NormalUser|>.R|SubstitutionOverride<java/lang/Class.getMethod: R|java/lang/reflect/Method!|>|(String(age)).R|java/lang/reflect/Method.invoke|(R|<local>/user|)
+        }
+        catch (e: R|kotlin/Exception|) {
+            ^box String(OK)
+        }
+
+        ^box String(Fail)
+    }
+    @R|foo/bar/Sparkify|() public final data class User : R|kotlin/Any| {
+        public constructor(name: R|kotlin/String| = String(John Doe), age: R|kotlin/Int| = Int(25), @R|foo/bar/ColumnName|(name = String(a)) test: R|kotlin/Double| = Double(1.0), test2: R|kotlin/Double| = Double(2.0)): R|foo/bar/User| {
+            super<R|kotlin/Any|>()
+        }
+
+        public final val name: R|kotlin/String| = R|<local>/name|
+            public get(): R|kotlin/String|
+
+        public final val age: R|kotlin/Int| = R|<local>/age|
+            public get(): R|kotlin/Int|
+
+        public final val test: R|kotlin/Double| = R|<local>/test|
+            public get(): R|kotlin/Double|
+
+        public final val test2: R|kotlin/Double| = R|<local>/test2|
+            @PROPERTY_GETTER:R|foo/bar/ColumnName|(name = String(b)) public get(): R|kotlin/Double|
+
+        public final operator fun component1(): R|kotlin/String|
+
+        public final operator fun component2(): R|kotlin/Int|
+
+        public final operator fun component3(): R|kotlin/Double|
+
+        public final operator fun component4(): R|kotlin/Double|
+
+        public final fun copy(name: R|kotlin/String| = this@R|foo/bar/User|.R|foo/bar/User.name|, age: R|kotlin/Int| = this@R|foo/bar/User|.R|foo/bar/User.age|, @R|foo/bar/ColumnName|(name = String(a)) test: R|kotlin/Double| = this@R|foo/bar/User|.R|foo/bar/User.test|, test2: R|kotlin/Double| = this@R|foo/bar/User|.R|foo/bar/User.test2|): R|foo/bar/User|
+
+    }
+    public final data class NormalUser : R|kotlin/Any| {
+        public constructor(name: R|kotlin/String| = String(John Doe), age: R|kotlin/Int| = Int(25)): R|foo/bar/NormalUser| {
+            super<R|kotlin/Any|>()
+        }
+
+        public final val name: R|kotlin/String| = R|<local>/name|
+            public get(): R|kotlin/String|
+
+        public final val age: R|kotlin/Int| = R|<local>/age|
+            public get(): R|kotlin/Int|
+
+        public final operator fun component1(): R|kotlin/String|
+
+        public final operator fun component2(): R|kotlin/Int|
+
+        public final fun copy(name: R|kotlin/String| = this@R|foo/bar/NormalUser|.R|foo/bar/NormalUser.name|, age: R|kotlin/Int| = this@R|foo/bar/NormalUser|.R|foo/bar/NormalUser.age|): R|foo/bar/NormalUser|
+
+    }
diff --git a/compiler-plugin/src/test/resources/testData/box/dataClassTest.kt b/compiler-plugin/src/test/resources/testData/box/dataClassTest.kt
new file mode 100644
index 00000000..2fa3973a
--- /dev/null
+++ b/compiler-plugin/src/test/resources/testData/box/dataClassTest.kt
@@ -0,0 +1,50 @@
+package foo.bar
+
+annotation class Sparkify
+annotation class ColumnName(val name: String)
+
+// Fake Equals
+interface Equals {
+    fun canEqual(that: Any?): Boolean
+}
+
+// Fake Product
+interface Product: Equals {
+    fun productElement(n: Int): Any
+    fun productArity(): Int
+}
+
+fun box(): String {
+    val user = User()
+    val name = User::class.java.getMethod("name").invoke(user)
+    val age = User::class.java.getMethod("age").invoke(user)
+    val a = User::class.java.getMethod("a").invoke(user)
+    val b = User::class.java.getMethod("b").invoke(user)
+
+    if (name != "John Doe" || age != 25 || a != 1.0 || b != 2.0) {
+        return "Could not invoke functions name(), age(), a(), or b() from Java"
+    }
+
+    try { // NormalUser is not @Sparkify, so these accessors must not exist
+        val normalUser = NormalUser()
+        NormalUser::class.java.getMethod("name").invoke(normalUser) // expect NoSuchMethodException
+        NormalUser::class.java.getMethod("age").invoke(normalUser)
+    } catch (e: Exception) {
+        return "OK"
+    }
+
+    return "Fail"
+}
+
+@Sparkify
+data class User(
+    val name: String = "John Doe",
+    val age: Int = 25,
+    @ColumnName("a") val test: Double = 1.0,
+    @get:ColumnName("b") val test2: Double = 2.0,
+)
+
+data class NormalUser(
+    val name: String = "John Doe",
+    val age: Int = 25,
+)
\ No newline at end of file
diff --git a/compiler-plugin/src/test/resources/testData/diagnostics/dataClassTest.fir.txt b/compiler-plugin/src/test/resources/testData/diagnostics/dataClassTest.fir.txt
new file mode 100644
index 00000000..165e02fd
--- /dev/null
+++ b/compiler-plugin/src/test/resources/testData/diagnostics/dataClassTest.fir.txt
@@ -0,0 +1,66 @@
+FILE: dataClassTest.kt
+    package foo.bar
+
+    public final annotation class Sparkify : R|kotlin/Annotation| {
+        public constructor(): R|foo/bar/Sparkify| {
+            super<R|kotlin/Any|>()
+        }
+
+    }
+    public final annotation class ColumnName : R|kotlin/Annotation| {
+        public constructor(name: R|kotlin/String|): R|foo/bar/ColumnName| {
+            super<R|kotlin/Any|>()
+        }
+
+        public final val name: R|kotlin/String| = R|<local>/name|
+            public get(): R|kotlin/String|
+
+    }
+    public abstract interface Equals : R|kotlin/Any| {
+        public abstract fun canEqual(that: R|kotlin/Any?|): R|kotlin/Boolean|
+
+    }
+    public abstract interface Product : R|foo/bar/Equals| {
+        public abstract fun productElement(n: R|kotlin/Int|): R|kotlin/Any|
+
+        public abstract fun productArity(): R|kotlin/Int|
+
+    }
+    public final fun test(): R|kotlin/Unit| {
+        lval user: R|foo/bar/User| = R|foo/bar/User.User|()
+        R|<local>/user|.R|foo/bar/User.productArity|()
+    }
+    @R|foo/bar/Sparkify|() public final data class User : R|kotlin/Any|, R|foo/bar/Product| {
+        public constructor(name: R|kotlin/String| = String(John Doe), age: R|kotlin/Int| = Int(25), @R|foo/bar/ColumnName|(name = String(a)) test: R|kotlin/Double| = Double(1.0), test2: R|kotlin/Double| = Double(2.0)): R|foo/bar/User| {
+            super<R|kotlin/Any|>()
+        }
+
+        public final val name: R|kotlin/String| = R|<local>/name|
+            public get(): R|kotlin/String|
+
+        public final val age: R|kotlin/Int| = R|<local>/age|
+            public get(): R|kotlin/Int|
+
+        public final val test: R|kotlin/Double| = R|<local>/test|
+            public get(): R|kotlin/Double|
+
+        public final val test2: R|kotlin/Double| = R|<local>/test2|
+            @PROPERTY_GETTER:R|foo/bar/ColumnName|(name = String(b)) public get(): R|kotlin/Double|
+
+        public final operator fun component1(): R|kotlin/String|
+
+        public final operator fun component2(): R|kotlin/Int|
+
+        public final operator fun component3(): R|kotlin/Double|
+
+        public final operator fun component4(): R|kotlin/Double|
+
+        public final fun copy(name: R|kotlin/String| = this@R|foo/bar/User|.R|foo/bar/User.name|, age: R|kotlin/Int| = this@R|foo/bar/User|.R|foo/bar/User.age|, @R|foo/bar/ColumnName|(name = String(a)) test: R|kotlin/Double| = this@R|foo/bar/User|.R|foo/bar/User.test|, test2: R|kotlin/Double| = this@R|foo/bar/User|.R|foo/bar/User.test2|): R|foo/bar/User|
+
+        public final fun productArity(): R|kotlin/Int|
+
+        public final fun productElement(n: R|kotlin/Int|): R|kotlin/Any?|
+
+        public final fun canEqual(that: R|kotlin/Any?|): R|kotlin/Boolean|
+
+    }
diff --git a/compiler-plugin/src/test/resources/testData/diagnostics/dataClassTest.kt b/compiler-plugin/src/test/resources/testData/diagnostics/dataClassTest.kt
new file mode 100644
index 00000000..4b86f4c1
--- /dev/null
+++ b/compiler-plugin/src/test/resources/testData/diagnostics/dataClassTest.kt
@@ -0,0 +1,28 @@
+package foo.bar
+
+annotation class Sparkify
+annotation class ColumnName(val name: String)
+
+// Fake Equals
+interface Equals {
+    fun canEqual(that: Any?): Boolean
+}
+
+// Fake Product
+interface Product: Equals {
+    fun productElement(n: Int): Any
+    fun productArity(): Int
+}
+
+fun test() {
+    val user = User()
+    user.productArity() // should not be an error
+}
+
+@Sparkify
+data <!ABSTRACT_MEMBER_NOT_IMPLEMENTED!>class User<!>(
+    val name: String = "John Doe",
+    val age: Int = 25,
+    @ColumnName("a") val test: Double = 1.0,
+    @get:ColumnName("b") val test2: Double = 2.0,
+)
diff --git a/core/src/main/scala/org/apache/spark/sql/KotlinReflection.scala b/core/src/main/scala/org/apache/spark/sql/KotlinReflection.scala
deleted file mode 100644
index 4916ceb7..00000000
--- a/core/src/main/scala/org/apache/spark/sql/KotlinReflection.scala
+++ /dev/null
@@ -1,1497 +0,0 @@
-/*-
- * =LICENSE=
- * Kotlin Spark API: Examples
- * ----------
- * Copyright (C) 2019 - 2020 JetBrains
- * ----------
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * =LICENSEEND=
- */
-
-package org.apache.spark.sql
-
-import org.apache.commons.lang3.reflect.ConstructorUtils
-import org.apache.spark.internal.Logging
-import org.apache.spark.sql.catalyst.DeserializerBuildHelper._
-import org.apache.spark.sql.catalyst.ScalaReflection.{Schema, dataTypeFor, getClassFromType, isSubtype, javaBoxedType, localTypeOf, mirror, universe}
-import org.apache.spark.sql.catalyst.SerializerBuildHelper._
-import org.apache.spark.sql.catalyst.analysis.GetColumnByOrdinal
-import org.apache.spark.sql.catalyst.expressions.objects._
-import org.apache.spark.sql.catalyst.expressions.{Expression, _}
-import org.apache.spark.sql.catalyst.util.ArrayBasedMapData
-import org.apache.spark.sql.catalyst.{DefinedByConstructorParams, InternalRow, ScalaReflection, WalkedTypePath}
-import org.apache.spark.sql.types._
-import org.apache.spark.unsafe.types.{CalendarInterval, UTF8String}
-import org.apache.spark.util.Utils
-
-import java.beans.{Introspector, PropertyDescriptor}
-import java.lang.Exception
-import java.lang.reflect.Method
-
-
-/**
- * A helper trait to create [[org.apache.spark.sql.catalyst.encoders.ExpressionEncoder]]s
- * for classes whose fields are entirely defined by constructor params but should not be
- * case classes.
- */
-//trait DefinedByConstructorParams
-
-/**
- * KotlinReflection is heavily inspired by ScalaReflection and even extends it just to add several methods
- */
-//noinspection RedundantBlock
-object KotlinReflection extends KotlinReflection {
-    /**
-     * Returns the Spark SQL DataType for a given java class.  Where this is not an exact mapping
-     * to a native type, an ObjectType is returned.
-     *
-     * Unlike `inferDataType`, this function doesn't do any massaging of types into the Spark SQL type
-     * system.  As a result, ObjectType will be returned for things like boxed Integers.
-     */
-    private def inferExternalType(cls: Class[_]): DataType = cls match {
-        case c if c == java.lang.Boolean.TYPE => BooleanType
-        case c if c == java.lang.Byte.TYPE => ByteType
-        case c if c == java.lang.Short.TYPE => ShortType
-        case c if c == java.lang.Integer.TYPE => IntegerType
-        case c if c == java.lang.Long.TYPE => LongType
-        case c if c == java.lang.Float.TYPE => FloatType
-        case c if c == java.lang.Double.TYPE => DoubleType
-        case c if c == classOf[Array[Byte]] => BinaryType
-        case c if c == classOf[Decimal] => DecimalType.SYSTEM_DEFAULT
-        case c if c == classOf[CalendarInterval] => CalendarIntervalType
-        case _ => ObjectType(cls)
-    }
-
-    val universe: scala.reflect.runtime.universe.type = scala.reflect.runtime.universe
-
-    // Since we are creating a runtime mirror using the class loader of current thread,
-    // we need to use def at here. So, every time we call mirror, it is using the
-    // class loader of the current thread.
-    override def mirror: universe.Mirror = {
-        universe.runtimeMirror(Thread.currentThread().getContextClassLoader)
-    }
-
-    import universe._
-
-    // The Predef.Map is scala.collection.immutable.Map.
-    // Since the map values can be mutable, we explicitly import scala.collection.Map at here.
-    import scala.collection.Map
-
-
-    def isSubtype(t: universe.Type, t2: universe.Type): Boolean = t <:< t2
-
-    /**
-     * Synchronize to prevent concurrent usage of `<:<` operator.
-     * This operator is not thread safe in any current version of scala; i.e.
-     * (2.11.12, 2.12.10, 2.13.0-M5).
-     *
-     * See https://github.com/scala/bug/issues/10766
-     */
-    /*
-      private[catalyst] def isSubtype(tpe1: `Type`, tpe2: `Type`): Boolean = {
-        ScalaReflection.ScalaSubtypeLock.synchronized {
-          tpe1 <:< tpe2
-        }
-      }
-    */
-
-    private def dataTypeFor(tpe: `Type`): DataType = cleanUpReflectionObjects {
-        tpe.dealias match {
-            case t if isSubtype(t, definitions.NullTpe) => NullType
-            case t if isSubtype(t, definitions.IntTpe) => IntegerType
-            case t if isSubtype(t, definitions.LongTpe) => LongType
-            case t if isSubtype(t, definitions.DoubleTpe) => DoubleType
-            case t if isSubtype(t, definitions.FloatTpe) => FloatType
-            case t if isSubtype(t, definitions.ShortTpe) => ShortType
-            case t if isSubtype(t, definitions.ByteTpe) => ByteType
-            case t if isSubtype(t, definitions.BooleanTpe) => BooleanType
-            case t if isSubtype(t, localTypeOf[Array[Byte]]) => BinaryType
-            case t if isSubtype(t, localTypeOf[CalendarInterval]) => CalendarIntervalType
-            case t if isSubtype(t, localTypeOf[Decimal]) => DecimalType.SYSTEM_DEFAULT
-            case _ => {
-                val className = getClassNameFromType(tpe)
-                className match {
-                    case "scala.Array" => {
-                        val TypeRef(_, _, Seq(elementType)) = tpe.dealias
-                        arrayClassFor(elementType)
-                    }
-                    case _ => {
-                        val clazz = getClassFromType(tpe)
-                        ObjectType(clazz)
-                    }
-                }
-            }
-        }
-    }
-
-    /**
-     * Given a type `T` this function constructs `ObjectType` that holds a class of type
-     * `Array[T]`.
-     *
-     * Special handling is performed for primitive types to map them back to their raw
-     * JVM form instead of the Scala Array that handles auto boxing.
-     */
-    private def arrayClassFor(tpe: `Type`): ObjectType = cleanUpReflectionObjects {
-        val cls = tpe.dealias match {
-            case t if isSubtype(t, definitions.IntTpe) => classOf[Array[Int]]
-            case t if isSubtype(t, definitions.LongTpe) => classOf[Array[Long]]
-            case t if isSubtype(t, definitions.DoubleTpe) => classOf[Array[Double]]
-            case t if isSubtype(t, definitions.FloatTpe) => classOf[Array[Float]]
-            case t if isSubtype(t, definitions.ShortTpe) => classOf[Array[Short]]
-            case t if isSubtype(t, definitions.ByteTpe) => classOf[Array[Byte]]
-            case t if isSubtype(t, definitions.BooleanTpe) => classOf[Array[Boolean]]
-            case t if isSubtype(t, localTypeOf[Array[Byte]]) => classOf[Array[Array[Byte]]]
-            case t if isSubtype(t, localTypeOf[CalendarInterval]) => classOf[Array[CalendarInterval]]
-            case t if isSubtype(t, localTypeOf[Decimal]) => classOf[Array[Decimal]]
-            case other => {
-                // There is probably a better way to do this, but I couldn't find it...
-                val elementType = dataTypeFor(other).asInstanceOf[ObjectType].cls
-                java.lang.reflect.Array.newInstance(elementType, 0).getClass
-            }
-
-        }
-        ObjectType(cls)
-    }
-
-    /**
-     * Returns true if the value of this data type is same between internal and external.
-     */
-    def isNativeType(dt: DataType): Boolean = dt match {
-        case NullType | BooleanType | ByteType | ShortType | IntegerType | LongType |
-             FloatType | DoubleType | BinaryType | CalendarIntervalType => {
-            true
-        }
-        case _ => false
-    }
-
-    private def baseType(tpe: `Type`): `Type` = {
-        tpe.dealias match {
-            case annotatedType: AnnotatedType => annotatedType.underlying
-            case other => other
-        }
-    }
-
-    /**
-     * Returns an expression that can be used to deserialize a Spark SQL representation to an object
-     * of type `T` with a compatible schema. The Spark SQL representation is located at ordinal 0 of
-     * a row, i.e., `GetColumnByOrdinal(0, _)`. Nested classes will have their fields accessed using
-     * `UnresolvedExtractValue`.
-     *
-     * The returned expression is used by `ExpressionEncoder`. The encoder will resolve and bind this
-     * deserializer expression when using it.
-     */
-    def deserializerForType(tpe: `Type`): Expression = {
-        val clsName = getClassNameFromType(tpe)
-        val walkedTypePath = WalkedTypePath().recordRoot(clsName)
-        val Schema(dataType, nullable) = schemaFor(tpe)
-
-        // Assumes we are deserializing the first column of a row.
-        deserializerForWithNullSafetyAndUpcast(
-            GetColumnByOrdinal(0, dataType), dataType,
-            nullable = nullable, walkedTypePath,
-            (casted, typePath) => deserializerFor(tpe, casted, typePath)
-        )
-    }
-
-
-    /**
-     * Returns an expression that can be used to deserialize an input expression to an object of type
-     * `T` with a compatible schema.
-     *
-     * @param tpe            The `Type` of deserialized object.
-     * @param path           The expression which can be used to extract serialized value.
-     * @param walkedTypePath The paths from top to bottom to access current field when deserializing.
-     */
-    private def deserializerFor(
-                                   tpe: `Type`,
-                                   path: Expression,
-                                   walkedTypePath: WalkedTypePath,
-                                   predefinedDt: Option[DataTypeWithClass] = None
-                               ): Expression = cleanUpReflectionObjects {
-        baseType(tpe) match {
-
-            //<editor-fold desc="Description">
-            case t if (
-                try {
-                    !dataTypeFor(t).isInstanceOf[ObjectType]
-                } catch {
-                    case _: Throwable => false
-                }) && !predefinedDt.exists(_.isInstanceOf[ComplexWrapper]) => {
-                path
-            }
-
-            case t if isSubtype(t, localTypeOf[java.lang.Integer]) => {
-                createDeserializerForTypesSupportValueOf(path, classOf[java.lang.Integer])
-            }
-            case t if isSubtype(t, localTypeOf[Int]) => {
-                createDeserializerForTypesSupportValueOf(path, classOf[java.lang.Integer])
-            }
-            case t if isSubtype(t, localTypeOf[java.lang.Long]) => {
-                createDeserializerForTypesSupportValueOf(path, classOf[java.lang.Long])
-            }
-            case t if isSubtype(t, localTypeOf[Long]) => {
-                createDeserializerForTypesSupportValueOf(path, classOf[java.lang.Long])
-            }
-            case t if isSubtype(t, localTypeOf[java.lang.Double]) => {
-                createDeserializerForTypesSupportValueOf(path, classOf[java.lang.Double])
-            }
-            case t if isSubtype(t, localTypeOf[Double]) => {
-                createDeserializerForTypesSupportValueOf(path, classOf[java.lang.Double])
-            }
-            case t if isSubtype(t, localTypeOf[java.lang.Float]) => {
-                createDeserializerForTypesSupportValueOf(path, classOf[java.lang.Float])
-            }
-            case t if isSubtype(t, localTypeOf[Float]) => {
-                createDeserializerForTypesSupportValueOf(path, classOf[java.lang.Float])
-            }
-            case t if isSubtype(t, localTypeOf[java.lang.Short]) => {
-                createDeserializerForTypesSupportValueOf(path, classOf[java.lang.Short])
-            }
-            case t if isSubtype(t, localTypeOf[Short]) => {
-                createDeserializerForTypesSupportValueOf(path, classOf[java.lang.Short])
-            }
-            case t if isSubtype(t, localTypeOf[java.lang.Byte]) => {
-                createDeserializerForTypesSupportValueOf(path, classOf[java.lang.Byte])
-            }
-            case t if isSubtype(t, localTypeOf[Byte]) => {
-                createDeserializerForTypesSupportValueOf(path, classOf[java.lang.Byte])
-            }
-            case t if isSubtype(t, localTypeOf[java.lang.Boolean]) => {
-                createDeserializerForTypesSupportValueOf(path, classOf[java.lang.Boolean])
-            }
-            case t if isSubtype(t, localTypeOf[Boolean]) => {
-                createDeserializerForTypesSupportValueOf(path, classOf[java.lang.Boolean])
-            }
-            case t if isSubtype(t, localTypeOf[java.time.LocalDate]) => {
-                createDeserializerForLocalDate(path)
-            }
-            case t if isSubtype(t, localTypeOf[java.sql.Date]) => {
-                createDeserializerForSqlDate(path)
-            } //</editor-fold>
-
-            case t if isSubtype(t, localTypeOf[java.time.Instant]) => {
-                createDeserializerForInstant(path)
-            }
-            case t if isSubtype(t, localTypeOf[java.lang.Enum[_]]) => {
-                createDeserializerForTypesSupportValueOf(
-                    Invoke(path, "toString", ObjectType(classOf[String]), returnNullable = false),
-                    getClassFromType(t),
-                )
-            }
-            case t if isSubtype(t, localTypeOf[java.sql.Timestamp]) => {
-                createDeserializerForSqlTimestamp(path)
-            }
-            case t if isSubtype(t, localTypeOf[java.time.LocalDateTime]) => {
-                //#if sparkMinor >= 3.2
-                createDeserializerForLocalDateTime(path)
-                //#else
-                //$throw new IllegalArgumentException("TimestampNTZType is supported in spark 3.2+")
-                //#endif
-            }
-            case t if isSubtype(t, localTypeOf[java.time.Duration]) => {
-                //#if sparkMinor >= 3.2
-                createDeserializerForDuration(path)
-                //#else
-                //$throw new IllegalArgumentException("java.time.Duration is supported in spark 3.2+")
-                //#endif
-            }
-            case t if isSubtype(t, localTypeOf[java.time.Period]) => {
-                //#if sparkMinor >= 3.2
-                createDeserializerForPeriod(path)
-                //#else
-                //$throw new IllegalArgumentException("java.time.Period is supported in spark 3.2+")
-                //#endif
-            }
-            case t if isSubtype(t, localTypeOf[java.lang.String]) => {
-                createDeserializerForString(path, returnNullable = false)
-            }
-            case t if isSubtype(t, localTypeOf[java.math.BigDecimal]) => {
-                createDeserializerForJavaBigDecimal(path, returnNullable = false)
-            }
-            case t if isSubtype(t, localTypeOf[BigDecimal]) => {
-                createDeserializerForScalaBigDecimal(path, returnNullable = false)
-            }
-            case t if isSubtype(t, localTypeOf[java.math.BigInteger]) => {
-                createDeserializerForJavaBigInteger(path, returnNullable = false)
-            }
-            case t if isSubtype(t, localTypeOf[scala.math.BigInt]) => {
-                createDeserializerForScalaBigInt(path)
-            }
-
-            case t if isSubtype(t, localTypeOf[Array[_]]) => {
-                var TypeRef(_, _, Seq(elementType)) = t
-                if (predefinedDt.isDefined && !elementType.dealias.typeSymbol.isClass)
-                    elementType = getType(predefinedDt.get.asInstanceOf[KComplexTypeWrapper].dt.asInstanceOf[ArrayType]
-                        .elementType.asInstanceOf[DataTypeWithClass].cls
-                    )
-                val Schema(dataType, elementNullable) = predefinedDt.map { it =>
-                    val elementInfo = it.asInstanceOf[KComplexTypeWrapper].dt.asInstanceOf[ArrayType].elementType
-                        .asInstanceOf[DataTypeWithClass]
-                    Schema(elementInfo.dt, elementInfo.nullable)
-                }.getOrElse(schemaFor(elementType))
-                val className = getClassNameFromType(elementType)
-                val newTypePath = walkedTypePath.recordArray(className)
-
-                val mapFunction: Expression => Expression = element => {
-                    // upcast the array element to the data type the encoder expected.
-                    deserializerForWithNullSafetyAndUpcast(
-                        element,
-                        dataType,
-                        nullable = elementNullable,
-                        newTypePath,
-                        (casted, typePath) => deserializerFor(
-                            tpe = elementType,
-                            path = casted,
-                            walkedTypePath = typePath,
-                            predefinedDt = predefinedDt
-                                .map(_.asInstanceOf[KComplexTypeWrapper].dt.asInstanceOf[ArrayType].elementType)
-                                .filter(_.isInstanceOf[ComplexWrapper])
-                                .map(_.asInstanceOf[ComplexWrapper])
-                        )
-                    )
-                }
-
-                val arrayData = UnresolvedMapObjects(mapFunction, path)
-                val arrayCls = arrayClassFor(elementType)
-
-                val methodName = elementType match {
-                    case t if isSubtype(t, definitions.IntTpe) => "toIntArray"
-                    case t if isSubtype(t, definitions.LongTpe) => "toLongArray"
-                    case t if isSubtype(t, definitions.DoubleTpe) => "toDoubleArray"
-                    case t if isSubtype(t, definitions.FloatTpe) => "toFloatArray"
-                    case t if isSubtype(t, definitions.ShortTpe) => "toShortArray"
-                    case t if isSubtype(t, definitions.ByteTpe) => "toByteArray"
-                    case t if isSubtype(t, definitions.BooleanTpe) => "toBooleanArray"
-                    // non-primitive
-                    case _ => "array"
-                }
-                Invoke(arrayData, methodName, arrayCls, returnNullable = false)
-            }
-
-            // We serialize a `Set` to Catalyst array. When we deserialize a Catalyst array
-            // to a `Set`, if there are duplicated elements, the elements will be de-duplicated.
-
-            case t if isSubtype(t, localTypeOf[Map[_, _]]) => {
-                val TypeRef(_, _, Seq(keyType, valueType)) = t
-
-                val classNameForKey = getClassNameFromType(keyType)
-                val classNameForValue = getClassNameFromType(valueType)
-
-                val newTypePath = walkedTypePath.recordMap(classNameForKey, classNameForValue)
-
-                UnresolvedCatalystToExternalMap(
-                    path,
-                    p => deserializerFor(keyType, p, newTypePath),
-                    p => deserializerFor(valueType, p, newTypePath),
-                    mirror.runtimeClass(t.typeSymbol.asClass)
-                )
-            }
-
-            case t if isSubtype(t, localTypeOf[java.lang.Enum[_]]) => {
-                createDeserializerForTypesSupportValueOf(
-                    createDeserializerForString(path, returnNullable = false),
-                    Class.forName(t.toString),
-                )
-            }
-            case t if t.typeSymbol.annotations.exists(_.tree.tpe =:= typeOf[SQLUserDefinedType]) => {
-                val udt = getClassFromType(t).getAnnotation(classOf[SQLUserDefinedType]).udt().
-                    getConstructor().newInstance()
-                val obj = NewInstance(
-                    udt.userClass.getAnnotation(classOf[SQLUserDefinedType]).udt(),
-                    Nil,
-                    dataType = ObjectType(udt.userClass.getAnnotation(classOf[SQLUserDefinedType]).udt())
-                )
-                Invoke(obj, "deserialize", ObjectType(udt.userClass), path :: Nil)
-            }
-
-            case t if UDTRegistration.exists(getClassNameFromType(t)) => {
-                val udt = UDTRegistration.getUDTFor(getClassNameFromType(t)).get.getConstructor().
-                    newInstance().asInstanceOf[UserDefinedType[_]]
-                val obj = NewInstance(
-                    udt.getClass,
-                    Nil,
-                    dataType = ObjectType(udt.getClass)
-                )
-                Invoke(obj, "deserialize", ObjectType(udt.userClass), path :: Nil)
-            }
-
-            case _ if predefinedDt.isDefined => {
-                predefinedDt.get match {
-
-                    case wrapper: KDataTypeWrapper => {
-                        val structType = wrapper.dt
-                        val cls = wrapper.cls
-                        val arguments = structType
-                            .fields
-                            .map { field =>
-                                val dataType = field.dataType.asInstanceOf[DataTypeWithClass]
-                                val nullable = dataType.nullable
-                                val clsName = getClassNameFromType(getType(dataType.cls))
-                                val newTypePath = walkedTypePath.recordField(clsName, field.name)
-
-                                // For tuples, we based grab the inner fields by ordinal instead of name.
-                                val newPath = deserializerFor(
-                                    tpe = getType(dataType.cls),
-                                    path = addToPath(path, field.name, dataType.dt, newTypePath),
-                                    walkedTypePath = newTypePath,
-                                    predefinedDt = Some(dataType).filter(_.isInstanceOf[ComplexWrapper])
-                                )
-                                expressionWithNullSafety(
-                                    newPath,
-                                    nullable = nullable,
-                                    newTypePath
-                                )
-                            }
-                        val newInstance = NewInstance(cls, arguments, ObjectType(cls), propagateNull = false)
-
-                        org.apache.spark.sql.catalyst.expressions.If(
-                            IsNull(path),
-                            org.apache.spark.sql.catalyst.expressions.Literal.create(null, ObjectType(cls)),
-                            newInstance
-                        )
-                    }
-
-                    case t: ComplexWrapper => {
-
-                        t.dt match {
-                            case MapType(kt, vt, _) => {
-                                val Seq(keyType, valueType) = Seq(kt, vt).map(_.asInstanceOf[DataTypeWithClass].cls)
-                                    .map(getType(_))
-                                val Seq(keyDT, valueDT) = Seq(kt, vt).map(_.asInstanceOf[DataTypeWithClass])
-                                val classNameForKey = getClassNameFromType(keyType)
-                                val classNameForValue = getClassNameFromType(valueType)
-
-                                val newTypePath = walkedTypePath.recordMap(classNameForKey, classNameForValue)
-
-                                val keyData =
-                                    Invoke(
-                                        UnresolvedMapObjects(
-                                            p => deserializerFor(
-                                                keyType, p, newTypePath, Some(keyDT)
-                                                    .filter(_.isInstanceOf[ComplexWrapper])
-                                            ),
-                                            MapKeys(path)
-                                        ),
-                                        "array",
-                                        ObjectType(classOf[Array[Any]])
-                                    )
-
-                                val valueData =
-                                    Invoke(
-                                        UnresolvedMapObjects(
-                                            p => deserializerFor(
-                                                valueType, p, newTypePath, Some(valueDT)
-                                                    .filter(_.isInstanceOf[ComplexWrapper])
-                                            ),
-                                            MapValues(path)
-                                        ),
-                                        "array",
-                                        ObjectType(classOf[Array[Any]])
-                                    )
-
-                                StaticInvoke(
-                                    ArrayBasedMapData.getClass,
-                                    ObjectType(classOf[java.util.Map[_, _]]),
-                                    "toJavaMap",
-                                    keyData :: valueData :: Nil,
-                                    returnNullable = false
-                                )
-                            }
-
-                            case ArrayType(elementType, containsNull) => {
-                                val dataTypeWithClass = elementType.asInstanceOf[DataTypeWithClass]
-                                val mapFunction: Expression => Expression = element => {
-                                    // upcast the array element to the data type the encoder expected.
-                                    val et = getType(dataTypeWithClass.cls)
-                                    val className = getClassNameFromType(et)
-                                    val newTypePath = walkedTypePath.recordArray(className)
-                                    deserializerForWithNullSafetyAndUpcast(
-                                        element,
-                                        dataTypeWithClass.dt,
-                                        nullable = dataTypeWithClass.nullable,
-                                        newTypePath,
-                                        (casted, typePath) => {
-                                            deserializerFor(
-                                                et, casted, typePath, Some(dataTypeWithClass)
-                                                    .filter(_.isInstanceOf[ComplexWrapper])
-                                                    .map(_.asInstanceOf[ComplexWrapper])
-                                            )
-                                        }
-                                    )
-                                }
-
-                                UnresolvedMapObjects(mapFunction, path, customCollectionCls = Some(t.cls))
-                            }
-
-                            case StructType(elementType: Array[StructField]) => {
-                                val cls = t.cls
-
-                                val arguments = elementType.map { field =>
-                                    val dataType = field.dataType.asInstanceOf[DataTypeWithClass]
-                                    val nullable = dataType.nullable
-                                    val clsName = getClassNameFromType(getType(dataType.cls))
-                                    val newTypePath = walkedTypePath.recordField(clsName, field.name)
-
-                                    // For tuples, we based grab the inner fields by ordinal instead of name.
-                                    val newPath = deserializerFor(
-                                        getType(dataType.cls),
-                                        addToPath(path, field.name, dataType.dt, newTypePath),
-                                        newTypePath,
-                                        Some(dataType).filter(_.isInstanceOf[ComplexWrapper])
-                                    )
-                                    expressionWithNullSafety(
-                                        newPath,
-                                        nullable = nullable,
-                                        newTypePath
-                                    )
-                                }
-                                val newInstance = NewInstance(cls, arguments, ObjectType(cls), propagateNull = false)
-
-                                org.apache.spark.sql.catalyst.expressions.If(
-                                    IsNull(path),
-                                    org.apache.spark.sql.catalyst.expressions.Literal.create(null, ObjectType(cls)),
-                                    newInstance
-                                )
-                            }
-
-                            case _ => {
-                                throw new UnsupportedOperationException(
-                                    s"No Encoder found for $tpe\n" + walkedTypePath
-                                )
-                            }
-                        }
-                    }
-                }
-            }
-
-            case t if definedByConstructorParams(t) => {
-                val params = getConstructorParameters(t)
-
-                val cls = getClassFromType(tpe)
-
-                val arguments = params.zipWithIndex.map { case ((fieldName, fieldType), i) =>
-                    val Schema(dataType, nullable) = schemaFor(fieldType)
-                    val clsName = getClassNameFromType(fieldType)
-                    val newTypePath = walkedTypePath.recordField(clsName, fieldName)
-
-                    // For tuples, we based grab the inner fields by ordinal instead of name.
-                    val newPath = if (cls.getName startsWith "scala.Tuple") {
-                        deserializerFor(
-                            fieldType,
-                            addToPathOrdinal(path, i, dataType, newTypePath),
-                            newTypePath
-                        )
-                    } else {
-                        deserializerFor(
-                            fieldType,
-                            addToPath(path, fieldName, dataType, newTypePath),
-                            newTypePath
-                        )
-                    }
-                    expressionWithNullSafety(
-                        newPath,
-                        nullable = nullable,
-                        newTypePath
-                    )
-                }
-
-                val newInstance = NewInstance(cls, arguments, ObjectType(cls), propagateNull = false)
-
-                org.apache.spark.sql.catalyst.expressions.If(
-                    IsNull(path),
-                    org.apache.spark.sql.catalyst.expressions.Literal.create(null, ObjectType(cls)),
-                    newInstance
-                )
-            }
-
-            case _ => {
-                throw new UnsupportedOperationException(
-                    s"No Encoder found for $tpe\n" + walkedTypePath
-                )
-            }
-        }
-    }
-
-    /**
-     * Returns an expression for serializing an object of type T to Spark SQL representation. The
-     * input object is located at ordinal 0 of a row, i.e., `BoundReference(0, _)`.
-     *
-     * If the given type is not supported, i.e. there is no encoder can be built for this type,
-     * an [[UnsupportedOperationException]] will be thrown with detailed error message to explain
-     * the type path walked so far and which class we are not supporting.
-     * There are 4 kinds of type path:
-     * * the root type: `root class: "abc.xyz.MyClass"`
-     * * the value type of [[Option]]: `option value class: "abc.xyz.MyClass"`
-     * * the element type of [[Array]] or [[Seq]]: `array element class: "abc.xyz.MyClass"`
-     * * the field of [[Product]]: `field (class: "abc.xyz.MyClass", name: "myField")`
-     */
-    def serializerForType(tpe: `Type`): Expression = ScalaReflection.cleanUpReflectionObjects {
-        val clsName = getClassNameFromType(tpe)
-        val walkedTypePath = WalkedTypePath().recordRoot(clsName)
-
-        // The input object to `ExpressionEncoder` is located at first column of an row.
-        val isPrimitive = tpe.typeSymbol.asClass.isPrimitive
-        val inputObject = BoundReference(0, dataTypeFor(tpe), nullable = !isPrimitive)
-
-        serializerFor(inputObject, tpe, walkedTypePath)
-    }
-
-    def getType[T](clazz: Class[T]): universe.Type = {
-        clazz match {
-            case _ if clazz == classOf[Array[Byte]] => localTypeOf[Array[Byte]]
-            case _ => {
-                val mir = runtimeMirror(clazz.getClassLoader)
-                mir.classSymbol(clazz).toType
-            }
-        }
-
-    }
-
-    def deserializerFor(cls: java.lang.Class[_], dt: DataTypeWithClass): Expression = {
-        val tpe = getType(cls)
-        val clsName = getClassNameFromType(tpe)
-        val walkedTypePath = WalkedTypePath().recordRoot(clsName)
-
-        // Assumes we are deserializing the first column of a row.
-        deserializerForWithNullSafetyAndUpcast(
-            GetColumnByOrdinal(0, dt.dt),
-            dt.dt,
-            nullable = dt.nullable,
-            walkedTypePath,
-            (casted, typePath) => deserializerFor(tpe, casted, typePath, Some(dt))
-        )
-    }
-
-
-    def serializerFor(cls: java.lang.Class[_], dt: DataTypeWithClass): Expression = {
-        val tpe = getType(cls)
-        val clsName = getClassNameFromType(tpe)
-        val walkedTypePath = WalkedTypePath().recordRoot(clsName)
-        val inputObject = BoundReference(0, ObjectType(cls), nullable = true)
-        serializerFor(inputObject, tpe, walkedTypePath, predefinedDt = Some(dt))
-    }
-
-    /**
-     * Returns an expression for serializing the value of an input expression into Spark SQL
-     * internal representation.
-     */
-    private def serializerFor(
-                                 inputObject: Expression,
-                                 tpe: `Type`,
-                                 walkedTypePath: WalkedTypePath,
-                                 seenTypeSet: Set[`Type`] = Set.empty,
-                                 predefinedDt: Option[DataTypeWithClass] = None,
-                             ): Expression = cleanUpReflectionObjects {
-
-        def toCatalystArray(
-                               input: Expression,
-                               elementType: `Type`,
-                               predefinedDt: Option[DataTypeWithClass] = None,
-                           ): Expression = {
-            val dataType = predefinedDt
-                .map(_.dt)
-                .getOrElse {
-                    dataTypeFor(elementType)
-                }
-
-            dataType match {
-
-                case dt@(MapType(_, _, _) | ArrayType(_, _) | StructType(_)) => {
-                    val clsName = getClassNameFromType(elementType)
-                    val newPath = walkedTypePath.recordArray(clsName)
-                    createSerializerForMapObjects(
-                        input, ObjectType(predefinedDt.get.cls),
-                        serializerFor(_, elementType, newPath, seenTypeSet, predefinedDt)
-                    )
-                }
-
-                case dt: ObjectType => {
-                    val clsName = getClassNameFromType(elementType)
-                    val newPath = walkedTypePath.recordArray(clsName)
-                    createSerializerForMapObjects(
-                        input, dt,
-                        serializerFor(_, elementType, newPath, seenTypeSet)
-                    )
-                }
-
-                //                case dt: ByteType =>
-                //                    createSerializerForPrimitiveArray(input, dt)
-
-                case dt@(BooleanType | ByteType | ShortType | IntegerType | LongType | FloatType | DoubleType) => {
-                    val cls = input.dataType.asInstanceOf[ObjectType].cls
-                    if (cls.isArray && cls.getComponentType.isPrimitive) {
-                        createSerializerForPrimitiveArray(input, dt)
-                    } else {
-                        createSerializerForGenericArray(
-                            inputObject = input,
-                            dataType = dt,
-                            nullable = predefinedDt
-                                .map(_.nullable)
-                                .getOrElse(
-                                    schemaFor(elementType).nullable
-                                ),
-                        )
-                    }
-                }
-
-                case _: StringType => {
-                    val clsName = getClassNameFromType(typeOf[String])
-                    val newPath = walkedTypePath.recordArray(clsName)
-                    createSerializerForMapObjects(
-                        input, ObjectType(Class.forName(getClassNameFromType(elementType))),
-                        serializerFor(_, elementType, newPath, seenTypeSet)
-                    )
-                }
-
-                case dt => {
-                    createSerializerForGenericArray(
-                        inputObject = input,
-                        dataType = dt,
-                        nullable = predefinedDt
-                            .map(_.nullable)
-                            .getOrElse {
-                                schemaFor(elementType).nullable
-                            },
-                    )
-                }
-            }
-        }
-
-        baseType(tpe) match {
-
-            //<editor-fold desc="scala-like">
-            case _ if !inputObject.dataType.isInstanceOf[ObjectType] &&
-                !predefinedDt.exists(_.isInstanceOf[ComplexWrapper]) => {
-                inputObject
-            }
-            case t if isSubtype(t, localTypeOf[Option[_]]) => {
-                val TypeRef(_, _, Seq(optType)) = t
-                val className = getClassNameFromType(optType)
-                val newPath = walkedTypePath.recordOption(className)
-                val unwrapped = UnwrapOption(dataTypeFor(optType), inputObject)
-                serializerFor(unwrapped, optType, newPath, seenTypeSet)
-            }
-
-            // Since List[_] also belongs to localTypeOf[Product], we put this case before
-            // "case t if definedByConstructorParams(t)" to make sure it will match to the
-            // case "localTypeOf[Seq[_]]"
-            case t if isSubtype(t, localTypeOf[Seq[_]]) => {
-                val TypeRef(_, _, Seq(elementType)) = t
-                toCatalystArray(inputObject, elementType)
-            }
-
-            case t if isSubtype(t, localTypeOf[Array[_]]) && predefinedDt.isEmpty => {
-                val TypeRef(_, _, Seq(elementType)) = t
-                toCatalystArray(inputObject, elementType)
-            }
-
-            case t if isSubtype(t, localTypeOf[Map[_, _]]) => {
-                val TypeRef(_, _, Seq(keyType, valueType)) = t
-                val keyClsName = getClassNameFromType(keyType)
-                val valueClsName = getClassNameFromType(valueType)
-                val keyPath = walkedTypePath.recordKeyForMap(keyClsName)
-                val valuePath = walkedTypePath.recordValueForMap(valueClsName)
-
-                createSerializerForMap(
-                    inputObject,
-                    MapElementInformation(
-                        dataTypeFor(keyType),
-                        nullable = !keyType.typeSymbol.asClass.isPrimitive,
-                        serializerFor(_, keyType, keyPath, seenTypeSet)
-                    ),
-                    MapElementInformation(
-                        dataTypeFor(valueType),
-                        nullable = !valueType.typeSymbol.asClass.isPrimitive,
-                        serializerFor(_, valueType, valuePath, seenTypeSet)
-                    )
-                )
-            }
-
-            case t if isSubtype(t, localTypeOf[scala.collection.Set[_]]) => {
-                val TypeRef(_, _, Seq(elementType)) = t
-
-                // There's no corresponding Catalyst type for `Set`, we serialize a `Set` to Catalyst array.
-                // Note that the property of `Set` is only kept when manipulating the data as domain object.
-                val newInput =
-                Invoke(
-                    inputObject,
-                    "toSeq",
-                    ObjectType(classOf[Seq[_]])
-                )
-
-                toCatalystArray(newInput, elementType)
-            }
-
-            case t if isSubtype(t, localTypeOf[String]) => {
-                createSerializerForString(inputObject)
-            }
-            case t if isSubtype(t, localTypeOf[java.time.Instant]) => {
-                createSerializerForJavaInstant(inputObject)
-            }
-            case t if isSubtype(t, localTypeOf[java.sql.Timestamp]) => {
-                createSerializerForSqlTimestamp(inputObject)
-            }
-            case t if isSubtype(t, localTypeOf[java.time.LocalDateTime]) => {
-                //#if sparkMinor >= 3.2
-                createSerializerForLocalDateTime(inputObject)
-                //#else
-                //$throw new IllegalArgumentException("TimestampNTZType is supported in spark 3.2+")
-                //#endif
-            }
-            case t if isSubtype(t, localTypeOf[java.time.LocalDate]) => {
-                createSerializerForJavaLocalDate(inputObject)
-            }
-            case t if isSubtype(t, localTypeOf[java.sql.Date]) => {
-                createSerializerForSqlDate(inputObject)
-            }
-            case t if isSubtype(t, localTypeOf[java.time.Duration]) => {
-                //#if sparkMinor >= 3.2
-                createSerializerForJavaDuration(inputObject)
-                //#else
-                //$throw new IllegalArgumentException("java.time.Duration is supported in spark 3.2+")
-                //#endif
-            }
-            case t if isSubtype(t, localTypeOf[java.time.Period]) => {
-                //#if sparkMinor >= 3.2
-                createSerializerForJavaPeriod(inputObject)
-                //#else
-                //$throw new IllegalArgumentException("java.time.Period is supported in spark 3.2+")
-                //#endif
-            }
-            case t if isSubtype(t, localTypeOf[BigDecimal]) => {
-                createSerializerForScalaBigDecimal(inputObject)
-            }
-            case t if isSubtype(t, localTypeOf[java.math.BigDecimal]) => {
-                createSerializerForJavaBigDecimal(inputObject)
-            }
-            case t if isSubtype(t, localTypeOf[java.math.BigInteger]) => {
-                createSerializerForJavaBigInteger(inputObject)
-            }
-            case t if isSubtype(t, localTypeOf[scala.math.BigInt]) => {
-                createSerializerForScalaBigInt(inputObject)
-            }
-
-            case t if isSubtype(t, localTypeOf[java.lang.Integer]) => {
-                createSerializerForInteger(inputObject)
-            }
-            case t if isSubtype(t, localTypeOf[Int]) => {
-                createSerializerForInteger(inputObject)
-            }
-            case t if isSubtype(t, localTypeOf[java.lang.Long]) => {
-                createSerializerForLong(inputObject)
-            }
-            case t if isSubtype(t, localTypeOf[Long]) => {
-                createSerializerForLong(inputObject)
-            }
-            case t if isSubtype(t, localTypeOf[java.lang.Double]) => {
-                createSerializerForDouble(inputObject)
-            }
-            case t if isSubtype(t, localTypeOf[Double]) => {
-                createSerializerForDouble(inputObject)
-            }
-            case t if isSubtype(t, localTypeOf[java.lang.Float]) => {
-                createSerializerForFloat(inputObject)
-            }
-            case t if isSubtype(t, localTypeOf[Float]) => {
-                createSerializerForFloat(inputObject)
-            }
-            case t if isSubtype(t, localTypeOf[java.lang.Short]) => {
-                createSerializerForShort(inputObject)
-            }
-            case t if isSubtype(t, localTypeOf[Short]) => {
-                createSerializerForShort(inputObject)
-            }
-            case t if isSubtype(t, localTypeOf[java.lang.Byte]) => {
-                createSerializerForByte(inputObject)
-            }
-            case t if isSubtype(t, localTypeOf[Byte]) => {
-                createSerializerForByte(inputObject)
-            }
-            case t if isSubtype(t, localTypeOf[java.lang.Boolean]) => {
-                createSerializerForBoolean(inputObject)
-            }
-            case t if isSubtype(t, localTypeOf[Boolean]) => {
-                createSerializerForBoolean(inputObject)
-            }
-            case t if isSubtype(t, localTypeOf[java.lang.Enum[_]]) => {
-                createSerializerForString(
-                    Invoke(inputObject, "name", ObjectType(classOf[String]), returnNullable = false)
-                )
-            }
-            case t if t.typeSymbol.annotations.exists(_.tree.tpe =:= typeOf[SQLUserDefinedType]) => {
-                val udt = getClassFromType(t)
-                    .getAnnotation(classOf[SQLUserDefinedType]).udt().getConstructor().newInstance()
-                val udtClass = udt.userClass.getAnnotation(classOf[SQLUserDefinedType]).udt()
-                createSerializerForUserDefinedType(inputObject, udt, udtClass)
-            }
-
-            case t if UDTRegistration.exists(getClassNameFromType(t)) => {
-                val udt = UDTRegistration.getUDTFor(getClassNameFromType(t)).get.getConstructor().
-                    newInstance().asInstanceOf[UserDefinedType[_]]
-                val udtClass = udt.getClass
-                createSerializerForUserDefinedType(inputObject, udt, udtClass)
-            }
-            //</editor-fold>
-
-            // Kotlin specific cases
-            case t if predefinedDt.isDefined => {
-
-                //                if (seenTypeSet.contains(t)) {
-                //                    throw new UnsupportedOperationException(
-                //                        s"cannot have circular references in class, but got the circular reference of class $t"
-                //                    )
-                //                }
-
-                predefinedDt.get match {
-
-                    // Kotlin data class
-                    case dataType: KDataTypeWrapper => {
-                        val cls = dataType.cls
-                        val properties = getJavaBeanReadableProperties(cls)
-                        val structFields = dataType.dt.fields.map(_.asInstanceOf[KStructField])
-                        val fields: Array[(String, Expression)] = structFields.map { structField =>
-                            val maybeProp = properties.find {
-                                _.getName == structField.getterName
-                            }
-                            if (maybeProp.isEmpty)
-                                throw new IllegalArgumentException(
-                                    s"Field ${structField.name} is not found among available props, which are: ${properties.map(_.getName).mkString(", ")}"
-                                )
-                            val fieldName = structField.name
-                            val propClass = structField.dataType.asInstanceOf[DataTypeWithClass].cls
-                            val propDt = structField.dataType.asInstanceOf[DataTypeWithClass]
-
-                            val fieldValue = Invoke(
-                                inputObject,
-                                maybeProp.get.getName,
-                                inferExternalType(propClass),
-                                returnNullable = structField.nullable
-                            )
-                            val newPath = walkedTypePath.recordField(propClass.getName, fieldName)
-
-                            val tpe = getType(propClass)
-
-                            val serializer = serializerFor(
-                                inputObject = fieldValue,
-                                tpe = tpe,
-                                walkedTypePath = newPath,
-                                seenTypeSet = seenTypeSet,
-                                predefinedDt = if (propDt.isInstanceOf[ComplexWrapper]) Some(propDt) else None
-                            )
-
-                            (fieldName, serializer)
-                        }
-                        createSerializerForObject(inputObject, fields)
-                    }
-
-                    case otherTypeWrapper: ComplexWrapper => {
-
-                        otherTypeWrapper.dt match {
-
-                            case MapType(kt, vt, _) => {
-                                val Seq(keyType, valueType) = Seq(kt, vt).map(_.asInstanceOf[DataTypeWithClass].cls)
-                                    .map(getType(_))
-                                val Seq(keyDT, valueDT) = Seq(kt, vt).map(_.asInstanceOf[DataTypeWithClass])
-                                val keyClsName = getClassNameFromType(keyType)
-                                val valueClsName = getClassNameFromType(valueType)
-                                val keyPath = walkedTypePath.recordKeyForMap(keyClsName)
-                                val valuePath = walkedTypePath.recordValueForMap(valueClsName)
-
-                                createSerializerForMap(
-                                    inputObject,
-                                    MapElementInformation(
-                                        dataTypeFor(keyType),
-                                        nullable = !keyType.typeSymbol.asClass.isPrimitive,
-                                        serializerFor(
-                                            _, keyType, keyPath, seenTypeSet, Some(keyDT)
-                                                .filter(_.isInstanceOf[ComplexWrapper])
-                                        )
-                                    ),
-                                    MapElementInformation(
-                                        dataTypeFor(valueType),
-                                        nullable = !valueType.typeSymbol.asClass.isPrimitive,
-                                        serializerFor(
-                                            _, valueType, valuePath, seenTypeSet, Some(valueDT)
-                                                .filter(_.isInstanceOf[ComplexWrapper])
-                                        )
-                                    )
-                                )
-                            }
-
-                            case ArrayType(elementType, _) => {
-                                toCatalystArray(
-                                    inputObject,
-                                    getType(elementType.asInstanceOf[DataTypeWithClass].cls
-                                    ), Some(elementType.asInstanceOf[DataTypeWithClass])
-                                )
-                            }
-
-                            case StructType(elementType: Array[StructField]) => {
-                                val cls = otherTypeWrapper.cls
-                                val names = elementType.map(_.name)
-
-                                val beanInfo = Introspector.getBeanInfo(cls)
-                                val methods = beanInfo.getMethodDescriptors.filter(it => names.contains(it.getName))
-
-
-                                val fields = elementType.map { structField =>
-
-                                    val maybeProp = methods.find(it => it.getName == structField.name)
-                                    if (maybeProp.isEmpty) throw new IllegalArgumentException(s"Field ${
-                                        structField.name
-                                    } is not found among available props, which are: ${
-                                        methods.map(_.getName).mkString(", ")
-                                    }"
-                                    )
-                                    val fieldName = structField.name
-                                    val propClass = structField.dataType.asInstanceOf[DataTypeWithClass].cls
-                                    val propDt = structField.dataType.asInstanceOf[DataTypeWithClass]
-                                    val fieldValue = Invoke(
-                                        inputObject,
-                                        maybeProp.get.getName,
-                                        inferExternalType(propClass),
-                                        returnNullable = propDt.nullable
-                                    )
-                                    val newPath = walkedTypePath.recordField(propClass.getName, fieldName)
-                                    (fieldName, serializerFor(
-                                        fieldValue, getType(propClass), newPath, seenTypeSet, if (propDt
-                                            .isInstanceOf[ComplexWrapper]) Some(propDt) else None
-                                    ))
-
-                                }
-                                createSerializerForObject(inputObject, fields)
-                            }
-
-                            case _ => {
-                                throw new UnsupportedOperationException(
-                                    s"No Encoder found for $tpe\n" + walkedTypePath
-                                )
-                            }
-                        }
-                    }
-                }
-            }
-
-            case t if definedByConstructorParams(t) => {
-                if (seenTypeSet.contains(t)) {
-                    throw new UnsupportedOperationException(
-                        s"cannot have circular references in class, but got the circular reference of class $t"
-                    )
-                }
-
-                val params = getConstructorParameters(t)
-                val fields = params.map { case (fieldName, fieldType) =>
-                    if (javaKeywords.contains(fieldName)) {
-                        throw new UnsupportedOperationException(s"`$fieldName` is a reserved keyword and " +
-                            "cannot be used as field name\n" + walkedTypePath
-                        )
-                    }
-
-                    // SPARK-26730 inputObject won't be null with If's guard below. And KnownNotNul
-                    // is necessary here. Because for a nullable nested inputObject with struct data
-                    // type, e.g. StructType(IntegerType, StringType), it will return nullable=true
-                    // for IntegerType without KnownNotNull. And that's what we do not expect to.
-                    val fieldValue = Invoke(
-                        KnownNotNull(inputObject), fieldName, dataTypeFor(fieldType),
-                        returnNullable = !fieldType.typeSymbol.asClass.isPrimitive
-                    )
-                    val clsName = getClassNameFromType(fieldType)
-                    val newPath = walkedTypePath.recordField(clsName, fieldName)
-                    (fieldName, serializerFor(fieldValue, fieldType, newPath, seenTypeSet + t))
-                }
-                createSerializerForObject(inputObject, fields)
-            }
-
-            case _ => {
-                throw new UnsupportedOperationException(
-                    s"No Encoder found for $tpe\n" + walkedTypePath
-                )
-            }
-        }
-    }
-
-    def createDeserializerForString(path: Expression, returnNullable: Boolean): Expression = {
-        Invoke(
-            path, "toString", ObjectType(classOf[java.lang.String]),
-            returnNullable = returnNullable
-        )
-    }
-
-    def getJavaBeanReadableProperties(beanClass: Class[_]): Array[Method] = {
-        val beanInfo = Introspector.getBeanInfo(beanClass)
-        beanInfo
-            .getMethodDescriptors
-            .filter { it => it.getName.startsWith("is") || it.getName.startsWith("get") }
-            .filterNot { _.getName == "getClass" }
-            .filterNot { _.getName == "getDeclaringClass" }
-            .map { _.getMethod }
-    }
-
-    /*
-     * Retrieves the runtime class corresponding to the provided type.
-     */
-    def getClassFromType(tpe: Type): Class[_] = mirror.runtimeClass(tpe.dealias.typeSymbol.asClass)
-
-    case class Schema(dataType: DataType, nullable: Boolean)
-
-    /** Returns a catalyst DataType and its nullability for the given Scala Type using reflection. */
-    def schemaFor(tpe: `Type`): Schema = cleanUpReflectionObjects {
-
-        baseType(tpe) match {
-            // this must be the first case, since all objects in scala are instances of Null, therefore
-            // Null type would wrongly match the first of them, which is Option as of now
-            case t if isSubtype(t, definitions.NullTpe) => Schema(NullType, nullable = true)
-
-            case t if t.typeSymbol.annotations.exists(_.tree.tpe =:= typeOf[SQLUserDefinedType]) => {
-                val udt = getClassFromType(t).getAnnotation(classOf[SQLUserDefinedType]).udt().
-                    getConstructor().newInstance()
-                Schema(udt, nullable = true)
-            }
-            case t if UDTRegistration.exists(getClassNameFromType(t)) => {
-                val udt = UDTRegistration
-                    .getUDTFor(getClassNameFromType(t))
-                    .get
-                    .getConstructor()
-                    .newInstance()
-                    .asInstanceOf[UserDefinedType[_]]
-                Schema(udt, nullable = true)
-            }
-            case t if isSubtype(t, localTypeOf[Option[_]]) => {
-                val TypeRef(_, _, Seq(optType)) = t
-                Schema(schemaFor(optType).dataType, nullable = true)
-            }
-            case t if isSubtype(t, localTypeOf[Array[Byte]]) => {
-                Schema(BinaryType, nullable = true)
-            }
-            case t if isSubtype(t, localTypeOf[Array[_]]) => {
-                val TypeRef(_, _, Seq(elementType)) = t
-                val Schema(dataType, nullable) = schemaFor(elementType)
-                Schema(ArrayType(dataType, containsNull = nullable), nullable = true)
-            }
-            case t if isSubtype(t, localTypeOf[Seq[_]]) => {
-                val TypeRef(_, _, Seq(elementType)) = t
-                val Schema(dataType, nullable) = schemaFor(elementType)
-                Schema(ArrayType(dataType, containsNull = nullable), nullable = true)
-            }
-            case t if isSubtype(t, localTypeOf[Map[_, _]]) => {
-                val TypeRef(_, _, Seq(keyType, valueType)) = t
-                val Schema(valueDataType, valueNullable) = schemaFor(valueType)
-                Schema(
-                    MapType(
-                        schemaFor(keyType).dataType,
-                        valueDataType, valueContainsNull = valueNullable
-                    ), nullable = true
-                )
-            }
-            case t if isSubtype(t, localTypeOf[Set[_]]) => {
-                val TypeRef(_, _, Seq(elementType)) = t
-                val Schema(dataType, nullable) = schemaFor(elementType)
-                Schema(ArrayType(dataType, containsNull = nullable), nullable = true)
-            }
-            case t if isSubtype(t, localTypeOf[String]) => {
-                Schema(StringType, nullable = true)
-            }
-            case t if isSubtype(t, localTypeOf[java.time.Instant]) => {
-                Schema(TimestampType, nullable = true)
-            }
-            case t if isSubtype(t, localTypeOf[java.sql.Timestamp]) => {
-                Schema(TimestampType, nullable = true)
-            }
-            // SPARK-36227: Remove TimestampNTZ type support in Spark 3.2 with minimal code changes.
-            case t if isSubtype(t, localTypeOf[java.time.LocalDateTime]) && Utils.isTesting => {
-                //#if sparkMinor >= 3.2
-                Schema(TimestampNTZType, nullable = true)
-                //#else
-                //$throw new IllegalArgumentException("java.time.LocalDateTime is supported in Spark 3.2+")
-                //#endif
-            }
-            case t if isSubtype(t, localTypeOf[java.time.LocalDate]) => {
-                Schema(DateType, nullable = true)
-            }
-            case t if isSubtype(t, localTypeOf[java.sql.Date]) => {
-                Schema(DateType, nullable = true)
-            }
-            case t if isSubtype(t, localTypeOf[CalendarInterval]) => {
-                Schema(CalendarIntervalType, nullable = true)
-            }
-            case t if isSubtype(t, localTypeOf[java.time.Duration]) => {
-                //#if sparkMinor >= 3.2
-                Schema(DayTimeIntervalType(), nullable = true)
-                //#else
-                //$throw new IllegalArgumentException("DayTimeIntervalType for java.time.Duration is supported in Spark 3.2+")
-                //#endif
-            }
-            case t if isSubtype(t, localTypeOf[java.time.Period]) => {
-                //#if sparkMinor >= 3.2
-                Schema(YearMonthIntervalType(), nullable = true)
-                //#else
-                //$throw new IllegalArgumentException("YearMonthIntervalType for java.time.Period is supported in Spark 3.2+")
-                //#endif
-            }
-            case t if isSubtype(t, localTypeOf[BigDecimal]) => {
-                Schema(DecimalType.SYSTEM_DEFAULT, nullable = true)
-            }
-            case t if isSubtype(t, localTypeOf[java.math.BigDecimal]) => {
-                Schema(DecimalType.SYSTEM_DEFAULT, nullable = true)
-            }
-            case t if isSubtype(t, localTypeOf[java.math.BigInteger]) => {
-                Schema(DecimalType.BigIntDecimal, nullable = true)
-            }
-            case t if isSubtype(t, localTypeOf[scala.math.BigInt]) => {
-                Schema(DecimalType.BigIntDecimal, nullable = true)
-            }
-            case t if isSubtype(t, localTypeOf[Decimal]) => {
-                Schema(DecimalType.SYSTEM_DEFAULT, nullable = true)
-            }
-            case t if isSubtype(t, localTypeOf[java.lang.Integer]) => Schema(IntegerType, nullable = true)
-            case t if isSubtype(t, localTypeOf[java.lang.Long]) => Schema(LongType, nullable = true)
-            case t if isSubtype(t, localTypeOf[java.lang.Double]) => Schema(DoubleType, nullable = true)
-            case t if isSubtype(t, localTypeOf[java.lang.Float]) => Schema(FloatType, nullable = true)
-            case t if isSubtype(t, localTypeOf[java.lang.Short]) => Schema(ShortType, nullable = true)
-            case t if isSubtype(t, localTypeOf[java.lang.Byte]) => Schema(ByteType, nullable = true)
-            case t if isSubtype(t, localTypeOf[java.lang.Boolean]) => Schema(BooleanType, nullable = true)
-            case t if isSubtype(t, definitions.IntTpe) => Schema(IntegerType, nullable = false)
-            case t if isSubtype(t, definitions.LongTpe) => Schema(LongType, nullable = false)
-            case t if isSubtype(t, definitions.DoubleTpe) => Schema(DoubleType, nullable = false)
-            case t if isSubtype(t, definitions.FloatTpe) => Schema(FloatType, nullable = false)
-            case t if isSubtype(t, definitions.ShortTpe) => Schema(ShortType, nullable = false)
-            case t if isSubtype(t, definitions.ByteTpe) => Schema(ByteType, nullable = false)
-            case t if isSubtype(t, definitions.BooleanTpe) => Schema(BooleanType, nullable = false)
-            case t if definedByConstructorParams(t) => {
-                val params = getConstructorParameters(t)
-                Schema(
-                    StructType(
-                        params.map { case (fieldName, fieldType) =>
-                            val Schema(dataType, nullable) = schemaFor(fieldType)
-                            StructField(fieldName, dataType, nullable)
-                        }
-                    ), nullable = true
-                )
-            }
-            case other => {
-                throw new UnsupportedOperationException(s"Schema for type $other is not supported")
-            }
-        }
-    }
-
-    /**
-     * Finds an accessible constructor with compatible parameters. This is a more flexible search than
-     * the exact matching algorithm in `Class.getConstructor`. The first assignment-compatible
-     * matching constructor is returned if it exists. Otherwise, we check for additional compatible
-     * constructors defined in the companion object as `apply` methods. Otherwise, it returns `None`.
-     */
-    def findConstructor[T](cls: Class[T], paramTypes: Seq[Class[_]]): Option[Seq[AnyRef] => T] = {
-        Option(ConstructorUtils.getMatchingAccessibleConstructor(cls, paramTypes: _*)) match {
-            case Some(c) => Some(x => c.newInstance(x: _*))
-            case None =>
-                val companion = mirror.staticClass(cls.getName).companion
-                val moduleMirror = mirror.reflectModule(companion.asModule)
-                val applyMethods = companion.asTerm.typeSignature
-                    .member(universe.TermName("apply")).asTerm.alternatives
-                applyMethods.find { method =>
-                    val params = method.typeSignature.paramLists.head
-                    // Check that the needed params are the same length and of matching types
-                    params.size == paramTypes.tail.size &&
-                        params.zip(paramTypes.tail).forall { case (ps, pc) =>
-                            ps.typeSignature.typeSymbol == mirror.classSymbol(pc)
-                        }
-                }.map { applyMethodSymbol =>
-                    val expectedArgsCount = applyMethodSymbol.typeSignature.paramLists.head.size
-                    val instanceMirror = mirror.reflect(moduleMirror.instance)
-                    val method = instanceMirror.reflectMethod(applyMethodSymbol.asMethod)
-                    (_args: Seq[AnyRef]) => {
-                        // Drop the "outer" argument if it is provided
-                        val args = if (_args.size == expectedArgsCount) _args else _args.tail
-                        method.apply(args: _*).asInstanceOf[T]
-                    }
-                }
-        }
-    }
-
-    /**
-     * Whether the fields of the given type is defined entirely by its constructor parameters.
-     */
-    def definedByConstructorParams(tpe: Type): Boolean = cleanUpReflectionObjects {
-        tpe.dealias match {
-            // `Option` is a `Product`, but we don't wanna treat `Option[Int]` as a struct type.
-            case t if isSubtype(t, localTypeOf[Option[_]]) => definedByConstructorParams(t.typeArgs.head)
-            case _ => {
-                isSubtype(tpe.dealias, localTypeOf[Product]) ||
-                    isSubtype(tpe.dealias, localTypeOf[DefinedByConstructorParams])
-            }
-        }
-    }
-
-    private val javaKeywords = Set(
-        "abstract", "assert", "boolean", "break", "byte", "case", "catch",
-        "char", "class", "const", "continue", "default", "do", "double", "else", "extends", "false",
-        "final", "finally", "float", "for", "goto", "if", "implements", "import", "instanceof", "int",
-        "interface", "long", "native", "new", "null", "package", "private", "protected", "public",
-        "return", "short", "static", "strictfp", "super", "switch", "synchronized", "this", "throw",
-        "throws", "transient", "true", "try", "void", "volatile", "while"
-    )
-
-
-    @scala.annotation.tailrec
-    def javaBoxedType(dt: DataType): Class[_] = dt match {
-        case _: DecimalType => classOf[Decimal]
-        //#if sparkMinor >= 3.2
-        case _: DayTimeIntervalType => classOf[java.lang.Long]
-        case _: YearMonthIntervalType => classOf[java.lang.Integer]
-        //#endif
-        case BinaryType => classOf[Array[Byte]]
-        case StringType => classOf[UTF8String]
-        case CalendarIntervalType => classOf[CalendarInterval]
-        case _: StructType => classOf[InternalRow]
-        case _: ArrayType => classOf[ArrayType]
-        case _: MapType => classOf[MapType]
-        case udt: UserDefinedType[_] => javaBoxedType(udt.sqlType)
-        case ObjectType(cls) => cls
-        case _ => ScalaReflection.typeBoxedJavaMapping.getOrElse(dt, classOf[java.lang.Object])
-    }
-
-}
-
-/**
- * Support for generating catalyst schemas for scala objects.  Note that unlike its companion
- * object, this trait able to work in both the runtime and the compile time (macro) universe.
- */
-trait KotlinReflection extends Logging {
-    /** The universe we work in (runtime or macro) */
-    val universe: scala.reflect.api.Universe
-
-    /** The mirror used to access types in the universe */
-    def mirror: universe.Mirror
-
-    import universe._
-
-    // The Predef.Map is scala.collection.immutable.Map.
-    // Since the map values can be mutable, we explicitly import scala.collection.Map at here.
-
-    /**
-     * Any codes calling `scala.reflect.api.Types.TypeApi.<:<` should be wrapped by this method to
-     * clean up the Scala reflection garbage automatically. Otherwise, it will leak some objects to
-     * `scala.reflect.runtime.JavaUniverse.undoLog`.
-     *
-     * @see https://github.com/scala/bug/issues/8302
-     */
-    def cleanUpReflectionObjects[T](func: => T): T = {
-        universe.asInstanceOf[scala.reflect.runtime.JavaUniverse].undoLog.undo(func)
-    }
-
-    /**
-     * Return the Scala Type for `T` in the current classloader mirror.
-     *
-     * Use this method instead of the convenience method `universe.typeOf`, which
-     * assumes that all types can be found in the classloader that loaded scala-reflect classes.
-     * That's not necessarily the case when running using Eclipse launchers or even
-     * Sbt console or test (without `fork := true`).
-     *
-     * @see SPARK-5281
-     */
-    def localTypeOf[T: TypeTag]: `Type` = {
-        val tag = implicitly[TypeTag[T]]
-        tag.in(mirror).tpe.dealias
-    }
-
-    private def isValueClass(tpe: Type): Boolean = {
-        tpe.typeSymbol.isClass && tpe.typeSymbol.asClass.isDerivedValueClass
-    }
-
-    /** Returns the name and type of the underlying parameter of value class `tpe`. */
-    private def getUnderlyingTypeOfValueClass(tpe: `Type`): Type = {
-        getConstructorParameters(tpe).head._2
-    }
-
-    /**
-     * Returns the full class name for a type. The returned name is the canonical
-     * Scala name, where each component is separated by a period. It is NOT the
-     * Java-equivalent runtime name (no dollar signs).
-     *
-     * In simple cases, both the Scala and Java names are the same, however when Scala
-     * generates constructs that do not map to a Java equivalent, such as singleton objects
-     * or nested classes in package objects, it uses the dollar sign ($) to create
-     * synthetic classes, emulating behaviour in Java bytecode.
-     */
-    def getClassNameFromType(tpe: `Type`): String = {
-        tpe.dealias.erasure.typeSymbol.asClass.fullName
-    }
-
-    /**
-     * Returns the parameter names and types for the primary constructor of this type.
-     *
-     * Note that it only works for scala classes with primary constructor, and currently doesn't
-     * support inner class.
-     */
-    def getConstructorParameters(tpe: Type): Seq[(String, Type)] = {
-        val dealiasedTpe = tpe.dealias
-        val formalTypeArgs = dealiasedTpe.typeSymbol.asClass.typeParams
-        val TypeRef(_, _, actualTypeArgs) = dealiasedTpe
-        val params = constructParams(dealiasedTpe)
-        params.map { p =>
-            val paramTpe = p.typeSignature
-            if (isValueClass(paramTpe)) {
-                // Replace value class with underlying type
-                p.name.decodedName.toString -> getUnderlyingTypeOfValueClass(paramTpe)
-            } else {
-                p.name.decodedName.toString -> paramTpe.substituteTypes(formalTypeArgs, actualTypeArgs)
-            }
-        }
-    }
-
-    /**
-     * If our type is a Scala trait it may have a companion object that
-     * only defines a constructor via `apply` method.
-     */
-    private def getCompanionConstructor(tpe: Type): Symbol = {
-        def throwUnsupportedOperation = {
-            throw new UnsupportedOperationException(s"Unable to find constructor for $tpe. " +
-                s"This could happen if $tpe is an interface, or a trait without companion object " +
-                "constructor."
-            )
-        }
-
-        tpe.typeSymbol.asClass.companion match {
-            case NoSymbol => throwUnsupportedOperation
-            case sym => {
-                sym.asTerm.typeSignature.member(universe.TermName("apply")) match {
-                    case NoSymbol => throwUnsupportedOperation
-                    case constructorSym => constructorSym
-                }
-            }
-        }
-    }
-
-    protected def constructParams(tpe: Type): Seq[Symbol] = {
-        val constructorSymbol = tpe.member(termNames.CONSTRUCTOR) match {
-            case NoSymbol => getCompanionConstructor(tpe)
-            case sym => sym
-        }
-        val params = if (constructorSymbol.isMethod) {
-            constructorSymbol.asMethod.paramLists
-        } else {
-            // Find the primary constructor, and use its parameter ordering.
-            val primaryConstructorSymbol: Option[Symbol] = constructorSymbol.asTerm.alternatives.find(
-                s => s.isMethod && s.asMethod.isPrimaryConstructor
-            )
-            if (primaryConstructorSymbol.isEmpty) {
-                sys.error("Internal SQL error: Product object did not have a primary constructor.")
-            } else {
-                primaryConstructorSymbol.get.asMethod.paramLists
-            }
-        }
-        params.flatten
-    }
-
-}
-
diff --git a/core/src/main/scala/org/apache/spark/sql/KotlinWrappers.scala b/core/src/main/scala/org/apache/spark/sql/KotlinWrappers.scala
deleted file mode 100644
index 76da9016..00000000
--- a/core/src/main/scala/org/apache/spark/sql/KotlinWrappers.scala
+++ /dev/null
@@ -1,229 +0,0 @@
-/*-
- * =LICENSE=
- * Kotlin Spark API: Examples
- * ----------
- * Copyright (C) 2019 - 2020 JetBrains
- * ----------
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * =LICENSEEND=
- */
-package org.apache.spark.sql
-
-import org.apache.spark.sql.catalyst.analysis.Resolver
-import org.apache.spark.sql.catalyst.expressions.{AttributeReference, Expression}
-import org.apache.spark.sql.catalyst.util.StringUtils
-import org.apache.spark.sql.types.{DataType, Metadata, StructField, StructType}
-
-
-trait DataTypeWithClass {
-    val dt: DataType
-    val cls: Class[ _ ]
-    val nullable: Boolean
-}
-
-trait ComplexWrapper extends DataTypeWithClass
-
-class KDataTypeWrapper(
-    val dt: StructType,
-    val cls: Class[ _ ],
-    val nullable: Boolean = true,
-) extends StructType with ComplexWrapper {
-
-    override def fieldNames: Array[ String ] = dt.fieldNames
-
-    override def names: Array[ String ] = dt.names
-
-    override def equals(that: Any): Boolean = dt.equals(that)
-
-    override def hashCode(): Int = dt.hashCode()
-
-    override def add(field: StructField): StructType = dt.add(field)
-
-    override def add(name: String, dataType: DataType): StructType = dt.add(name, dataType)
-
-    override def add(name: String, dataType: DataType, nullable: Boolean): StructType = dt.add(name, dataType, nullable)
-
-    override def add(name: String, dataType: DataType, nullable: Boolean, metadata: Metadata): StructType = dt
-        .add(name, dataType, nullable, metadata)
-
-    override def add(name: String, dataType: DataType, nullable: Boolean, comment: String): StructType = dt
-        .add(name, dataType, nullable, comment)
-
-    override def add(name: String, dataType: String): StructType = dt.add(name, dataType)
-
-    override def add(name: String, dataType: String, nullable: Boolean): StructType = dt.add(name, dataType, nullable)
-
-    override def add(name: String, dataType: String, nullable: Boolean, metadata: Metadata): StructType = dt
-        .add(name, dataType, nullable, metadata)
-
-    override def add(name: String, dataType: String, nullable: Boolean, comment: String): StructType = dt
-        .add(name, dataType, nullable, comment)
-
-    override def apply(name: String): StructField = dt.apply(name)
-
-    override def apply(names: Set[ String ]): StructType = dt.apply(names)
-
-    override def fieldIndex(name: String): Int = dt.fieldIndex(name)
-
-    override private[ sql ] def getFieldIndex(name: String) = dt.getFieldIndex(name)
-
-    //#if sparkMinor < 3.2
-    //$override
-    //#endif
-    private[ sql ] def findNestedField(fieldNames: Seq[ String ], includeCollections: Boolean, resolver: Resolver) =
-        dt.findNestedField(fieldNames, includeCollections, resolver)
-
-    override private[ sql ] def buildFormattedString(prefix: String, stringConcat: StringUtils.StringConcat, maxDepth: Int): Unit =
-        dt.buildFormattedString(prefix, stringConcat, maxDepth)
-
-    override protected[ sql ] def toAttributes: Seq[ AttributeReference ] = dt.toAttributes
-
-    override def treeString: String = dt.treeString
-
-    override def treeString(maxDepth: Int): String = dt.treeString(maxDepth)
-
-    override def printTreeString(): Unit = dt.printTreeString()
-
-    private[ sql ] override def jsonValue = dt.jsonValue
-
-    override def apply(fieldIndex: Int): StructField = dt.apply(fieldIndex)
-
-    override def length: Int = dt.length
-
-    override def iterator: Iterator[ StructField ] = dt.iterator
-
-    override def defaultSize: Int = dt.defaultSize
-
-    override def simpleString: String = dt.simpleString
-
-    override def catalogString: String = dt.catalogString
-
-    override def sql: String = dt.sql
-
-    override def toDDL: String = dt.toDDL
-
-    private[ sql ] override def simpleString(maxNumberFields: Int) = dt.simpleString(maxNumberFields)
-
-    override private[ sql ] def merge(that: StructType) = dt.merge(that)
-
-    private[ spark ] override def asNullable = dt.asNullable
-
-    private[ spark ] override def existsRecursively(f: DataType => Boolean) = dt.existsRecursively(f)
-
-    override private[ sql ] lazy val interpretedOrdering = dt.interpretedOrdering
-
-    override def toString = s"KDataTypeWrapper(dt=$dt, cls=$cls, nullable=$nullable)"
-}
-
-case class KComplexTypeWrapper(dt: DataType, cls: Class[ _ ], nullable: Boolean) extends DataType with ComplexWrapper {
-
-    override private[ sql ] def unapply(e: Expression) = dt.unapply(e)
-
-    override def typeName: String = dt.typeName
-
-    override private[ sql ] def jsonValue = dt.jsonValue
-
-    override def json: String = dt.json
-
-    override def prettyJson: String = dt.prettyJson
-
-    override def simpleString: String = dt.simpleString
-
-    override def catalogString: String = dt.catalogString
-
-    override private[ sql ] def simpleString(maxNumberFields: Int) = dt.simpleString(maxNumberFields)
-
-    override def sql: String = dt.sql
-
-    override private[ spark ] def sameType(other: DataType) = dt.sameType(other)
-
-    override private[ spark ] def existsRecursively(f: DataType => Boolean) = dt.existsRecursively(f)
-
-    private[ sql ] override def defaultConcreteType = dt.defaultConcreteType
-
-    private[ sql ] override def acceptsType(other: DataType) = dt.acceptsType(other)
-
-    override def defaultSize: Int = dt.defaultSize
-
-    override private[ spark ] def asNullable = dt.asNullable
-
-}
-
-case class KSimpleTypeWrapper(dt: DataType, cls: Class[ _ ], nullable: Boolean) extends DataType with DataTypeWithClass {
-    override private[ sql ] def unapply(e: Expression) = dt.unapply(e)
-
-    override def typeName: String = dt.typeName
-
-    override private[ sql ] def jsonValue = dt.jsonValue
-
-    override def json: String = dt.json
-
-    override def prettyJson: String = dt.prettyJson
-
-    override def simpleString: String = dt.simpleString
-
-    override def catalogString: String = dt.catalogString
-
-    override private[ sql ] def simpleString(maxNumberFields: Int) = dt.simpleString(maxNumberFields)
-
-    override def sql: String = dt.sql
-
-    override private[ spark ] def sameType(other: DataType) = dt.sameType(other)
-
-    override private[ spark ] def existsRecursively(f: DataType => Boolean) = dt.existsRecursively(f)
-
-    private[ sql ] override def defaultConcreteType = dt.defaultConcreteType
-
-    private[ sql ] override def acceptsType(other: DataType) = dt.acceptsType(other)
-
-    override def defaultSize: Int = dt.defaultSize
-
-    override private[ spark ] def asNullable = dt.asNullable
-}
-
-class KStructField(val getterName: String, val delegate: StructField) extends StructField {
-
-    override private[ sql ] def buildFormattedString(prefix: String, stringConcat: StringUtils.StringConcat, maxDepth: Int): Unit =
-        delegate.buildFormattedString(prefix, stringConcat, maxDepth)
-
-    override def toString(): String = delegate.toString()
-
-    override private[ sql ] def jsonValue = delegate.jsonValue
-
-    override def withComment(comment: String): StructField = delegate.withComment(comment)
-
-    override def getComment(): Option[ String ] = delegate.getComment()
-
-    override def toDDL: String = delegate.toDDL
-
-    override def productElement(n: Int): Any = delegate.productElement(n)
-
-    override def productArity: Int = delegate.productArity
-
-    override def productIterator: Iterator[ Any ] = delegate.productIterator
-
-    override def productPrefix: String = delegate.productPrefix
-
-    override val dataType: DataType = delegate.dataType
-
-    override def canEqual(that: Any): Boolean = delegate.canEqual(that)
-
-    override val metadata: Metadata = delegate.metadata
-    override val name: String = delegate.name
-    override val nullable: Boolean = delegate.nullable
-}
-
-object helpme {
-
-    def listToSeq(i: java.util.List[ _ ]): Seq[ _ ] = Seq(i.toArray: _*)
-}
\ No newline at end of file
diff --git a/core/src/main/scala/org/apache/spark/sql/catalyst/CatalystTypeConverters.scala b/core/src/main/scala/org/apache/spark/sql/catalyst/CatalystTypeConverters.scala
deleted file mode 100644
index 864fc5f7..00000000
--- a/core/src/main/scala/org/apache/spark/sql/catalyst/CatalystTypeConverters.scala
+++ /dev/null
@@ -1,493 +0,0 @@
-package org.apache.spark.sql.catalyst
-
-import kotlin.jvm.JvmClassMappingKt
-import kotlin.reflect.{KClass, KFunction, KProperty1}
-import kotlin.reflect.full.KClasses
-
-import java.lang.{Iterable => JavaIterable}
-import java.math.{BigDecimal => JavaBigDecimal}
-import java.math.{BigInteger => JavaBigInteger}
-import java.sql.{Date, Timestamp}
-import java.time.{Instant, LocalDate}
-import java.util.{Map => JavaMap}
-import javax.annotation.Nullable
-import scala.language.existentials
-import org.apache.spark.sql.Row
-import org.apache.spark.sql.catalyst.expressions._
-import org.apache.spark.sql.catalyst.util._
-import org.apache.spark.sql.internal.SQLConf
-import org.apache.spark.sql.types._
-import org.apache.spark.unsafe.types.UTF8String
-
-/**
- * Functions to convert Scala types to Catalyst types and vice versa.
- */
-object CatalystTypeConverters {
-  // The Predef.Map is scala.collection.immutable.Map.
-  // Since the map values can be mutable, we explicitly import scala.collection.Map at here.
-
-  import scala.collection.Map
-
-  private[sql] def isPrimitive(dataType: DataType): Boolean = {
-    dataType match {
-      case BooleanType => true
-      case ByteType => true
-      case ShortType => true
-      case IntegerType => true
-      case LongType => true
-      case FloatType => true
-      case DoubleType => true
-      case _ => false
-    }
-  }
-
-  private def getConverterForType(dataType: DataType): CatalystTypeConverter[Any, Any, Any] = {
-    val converter = dataType match {
-      case udt: UserDefinedType[_] => UDTConverter(udt)
-      case arrayType: ArrayType => ArrayConverter(arrayType.elementType)
-      case mapType: MapType => MapConverter(mapType.keyType, mapType.valueType)
-      case structType: StructType => StructConverter(structType)
-      case StringType => StringConverter
-      case DateType if SQLConf.get.datetimeJava8ApiEnabled => LocalDateConverter
-      case DateType => DateConverter
-      case TimestampType if SQLConf.get.datetimeJava8ApiEnabled => InstantConverter
-      case TimestampType => TimestampConverter
-      case dt: DecimalType => new DecimalConverter(dt)
-      case BooleanType => BooleanConverter
-      case ByteType => ByteConverter
-      case ShortType => ShortConverter
-      case IntegerType => IntConverter
-      case LongType => LongConverter
-      case FloatType => FloatConverter
-      case DoubleType => DoubleConverter
-      case dataType: DataType => IdentityConverter(dataType)
-    }
-    converter.asInstanceOf[CatalystTypeConverter[Any, Any, Any]]
-  }
-
-  /**
-   * Converts a Scala type to its Catalyst equivalent (and vice versa).
-   *
-   * @tparam ScalaInputType  The type of Scala values that can be converted to Catalyst.
-   * @tparam ScalaOutputType The type of Scala values returned when converting Catalyst to Scala.
-   * @tparam CatalystType    The internal Catalyst type used to represent values of this Scala type.
-   */
-  private abstract class CatalystTypeConverter[ScalaInputType, ScalaOutputType, CatalystType]
-    extends Serializable {
-
-    /**
-     * Converts a Scala type to its Catalyst equivalent while automatically handling nulls
-     * and Options.
-     */
-    final def toCatalyst(@Nullable maybeScalaValue: Any): CatalystType = {
-      if (maybeScalaValue == null) {
-        null.asInstanceOf[CatalystType]
-      } else maybeScalaValue match {
-        case opt: Option[ScalaInputType] =>
-          if (opt.isDefined) {
-            toCatalystImpl(opt.get)
-          } else {
-            null.asInstanceOf[CatalystType]
-          }
-        case _ =>
-          toCatalystImpl(maybeScalaValue.asInstanceOf[ScalaInputType])
-      }
-    }
-
-    /**
-     * Given a Catalyst row, convert the value at column `column` to its Scala equivalent.
-     */
-    final def toScala(row: InternalRow, column: Int): ScalaOutputType = {
-      if (row.isNullAt(column)) null.asInstanceOf[ScalaOutputType] else toScalaImpl(row, column)
-    }
-
-    /**
-     * Convert a Catalyst value to its Scala equivalent.
-     */
-    def toScala(@Nullable catalystValue: CatalystType): ScalaOutputType
-
-    /**
-     * Converts a Scala value to its Catalyst equivalent.
-     *
-     * @param scalaValue the Scala value, guaranteed not to be null.
-     * @return the Catalyst value.
-     */
-    protected def toCatalystImpl(scalaValue: ScalaInputType): CatalystType
-
-    /**
-     * Given a Catalyst row, convert the value at column `column` to its Scala equivalent.
-     * This method will only be called on non-null columns.
-     */
-    protected def toScalaImpl(row: InternalRow, column: Int): ScalaOutputType
-  }
-
-  private case class IdentityConverter(dataType: DataType)
-    extends CatalystTypeConverter[Any, Any, Any] {
-    override def toCatalystImpl(scalaValue: Any): Any = scalaValue
-
-    override def toScala(catalystValue: Any): Any = catalystValue
-
-    override def toScalaImpl(row: InternalRow, column: Int): Any = row.get(column, dataType)
-  }
-
-  private case class UDTConverter[A >: Null](
-                                              udt: UserDefinedType[A]) extends CatalystTypeConverter[A, A, Any] {
-    // toCatalyst (it calls toCatalystImpl) will do null check.
-    override def toCatalystImpl(scalaValue: A): Any = udt.serialize(scalaValue)
-
-    override def toScala(catalystValue: Any): A = {
-      if (catalystValue == null) null else udt.deserialize(catalystValue)
-    }
-
-    override def toScalaImpl(row: InternalRow, column: Int): A =
-      toScala(row.get(column, udt.sqlType))
-  }
-
-  /** Converter for arrays, sequences, and Java iterables. */
-  private case class ArrayConverter(
-                                     elementType: DataType) extends CatalystTypeConverter[Any, Seq[Any], ArrayData] {
-
-    private[this] val elementConverter = getConverterForType(elementType)
-
-    override def toCatalystImpl(scalaValue: Any): ArrayData = {
-      scalaValue match {
-        case a: Array[_] =>
-          new GenericArrayData(a.map(elementConverter.toCatalyst))
-        case s: Seq[_] =>
-          new GenericArrayData(s.map(elementConverter.toCatalyst).toArray)
-        case i: JavaIterable[_] =>
-          val iter = i.iterator
-          val convertedIterable = scala.collection.mutable.ArrayBuffer.empty[Any]
-          while (iter.hasNext) {
-            val item = iter.next()
-            convertedIterable += elementConverter.toCatalyst(item)
-          }
-          new GenericArrayData(convertedIterable.toArray)
-        case other => throw new IllegalArgumentException(
-          s"The value (${other.toString}) of the type (${other.getClass.getCanonicalName}) "
-            + s"cannot be converted to an array of ${elementType.catalogString}")
-      }
-    }
-
-    override def toScala(catalystValue: ArrayData): Seq[Any] = {
-      if (catalystValue == null) {
-        null
-      } else if (isPrimitive(elementType)) {
-        catalystValue.toArray[Any](elementType)
-      } else {
-        val result = new Array[Any](catalystValue.numElements())
-        catalystValue.foreach(elementType, (i, e) => {
-          result(i) = elementConverter.toScala(e)
-        })
-        result
-      }
-    }
-
-    override def toScalaImpl(row: InternalRow, column: Int): Seq[Any] =
-      toScala(row.getArray(column))
-  }
-
-  private case class MapConverter(
-                                   keyType: DataType,
-                                   valueType: DataType)
-    extends CatalystTypeConverter[Any, Map[Any, Any], MapData] {
-
-    private[this] val keyConverter = getConverterForType(keyType)
-    private[this] val valueConverter = getConverterForType(valueType)
-
-    override def toCatalystImpl(scalaValue: Any): MapData = {
-      val keyFunction = (k: Any) => keyConverter.toCatalyst(k)
-      val valueFunction = (k: Any) => valueConverter.toCatalyst(k)
-
-      scalaValue match {
-        case map: Map[_, _] => ArrayBasedMapData(map, keyFunction, valueFunction)
-        case javaMap: JavaMap[_, _] => ArrayBasedMapData(javaMap, keyFunction, valueFunction)
-        case other => throw new IllegalArgumentException(
-          s"The value (${other.toString}) of the type (${other.getClass.getCanonicalName}) "
-            + "cannot be converted to a map type with "
-            + s"key type (${keyType.catalogString}) and value type (${valueType.catalogString})")
-      }
-    }
-
-    override def toScala(catalystValue: MapData): Map[Any, Any] = {
-      if (catalystValue == null) {
-        null
-      } else {
-        val keys = catalystValue.keyArray().toArray[Any](keyType)
-        val values = catalystValue.valueArray().toArray[Any](valueType)
-        val convertedKeys =
-          if (isPrimitive(keyType)) keys else keys.map(keyConverter.toScala)
-        val convertedValues =
-          if (isPrimitive(valueType)) values else values.map(valueConverter.toScala)
-
-        convertedKeys.zip(convertedValues).toMap
-      }
-    }
-
-    override def toScalaImpl(row: InternalRow, column: Int): Map[Any, Any] =
-      toScala(row.getMap(column))
-  }
-
-  private case class StructConverter(
-                                      structType: StructType) extends CatalystTypeConverter[Any, Row, InternalRow] {
-
-    private[this] val converters = structType.fields.map { f => getConverterForType(f.dataType) }
-
-    override def toCatalystImpl(scalaValue: Any): InternalRow = scalaValue match {
-      case row: Row =>
-        val ar = new Array[Any](row.size)
-        var idx = 0
-        while (idx < row.size) {
-          ar(idx) = converters(idx).toCatalyst(row(idx))
-          idx += 1
-        }
-        new GenericInternalRow(ar)
-
-      case p: Product =>
-        val ar = new Array[Any](structType.size)
-        val iter = p.productIterator
-        var idx = 0
-        while (idx < structType.size) {
-          ar(idx) = converters(idx).toCatalyst(iter.next())
-          idx += 1
-        }
-        new GenericInternalRow(ar)
-
-      case ktDataClass: Any if JvmClassMappingKt.getKotlinClass(ktDataClass.getClass).isData =>
-        import scala.collection.JavaConverters._
-        val klass: KClass[Any] = JvmClassMappingKt.getKotlinClass(ktDataClass.getClass).asInstanceOf[KClass[Any]]
-        val iter: Iterator[KProperty1[Any,_]] = KClasses.getDeclaredMemberProperties(klass).iterator().asScala
-        val ar = new Array[Any](structType.size)
-        var idx = 0
-        while (idx < structType.size) {
-          ar(idx) = converters(idx).toCatalyst(iter.next().get(ktDataClass))
-          idx += 1
-        }
-        new GenericInternalRow(ar)
-
-      case other => throw new IllegalArgumentException(
-        s"The value (${other.toString}) of the type (${other.getClass.getCanonicalName}) "
-          + s"cannot be converted to ${structType.catalogString}")
-    }
-
-    override def toScala(row: InternalRow): Row = {
-      if (row == null) {
-        null
-      } else {
-        val ar = new Array[Any](row.numFields)
-        var idx = 0
-        while (idx < row.numFields) {
-          ar(idx) = converters(idx).toScala(row, idx)
-          idx += 1
-        }
-        new GenericRowWithSchema(ar, structType)
-      }
-    }
-
-    override def toScalaImpl(row: InternalRow, column: Int): Row =
-      toScala(row.getStruct(column, structType.size))
-  }
-
-  private object StringConverter extends CatalystTypeConverter[Any, String, UTF8String] {
-    override def toCatalystImpl(scalaValue: Any): UTF8String = scalaValue match {
-      case str: String => UTF8String.fromString(str)
-      case utf8: UTF8String => utf8
-      case chr: Char => UTF8String.fromString(chr.toString)
-      case other => throw new IllegalArgumentException(
-        s"The value (${other.toString}) of the type (${other.getClass.getCanonicalName}) "
-          + s"cannot be converted to the string type")
-    }
-
-    override def toScala(catalystValue: UTF8String): String =
-      if (catalystValue == null) null else catalystValue.toString
-
-    override def toScalaImpl(row: InternalRow, column: Int): String =
-      row.getUTF8String(column).toString
-  }
-
-  private object DateConverter extends CatalystTypeConverter[Date, Date, Any] {
-    override def toCatalystImpl(scalaValue: Date): Int = DateTimeUtils.fromJavaDate(scalaValue)
-
-    override def toScala(catalystValue: Any): Date =
-      if (catalystValue == null) null else DateTimeUtils.toJavaDate(catalystValue.asInstanceOf[Int])
-
-    override def toScalaImpl(row: InternalRow, column: Int): Date =
-      DateTimeUtils.toJavaDate(row.getInt(column))
-  }
-
-  private object LocalDateConverter extends CatalystTypeConverter[LocalDate, LocalDate, Any] {
-    override def toCatalystImpl(scalaValue: LocalDate): Int = {
-      DateTimeUtils.localDateToDays(scalaValue)
-    }
-
-    override def toScala(catalystValue: Any): LocalDate = {
-      if (catalystValue == null) null
-      else DateTimeUtils.daysToLocalDate(catalystValue.asInstanceOf[Int])
-    }
-
-    override def toScalaImpl(row: InternalRow, column: Int): LocalDate =
-      DateTimeUtils.daysToLocalDate(row.getInt(column))
-  }
-
-  private object TimestampConverter extends CatalystTypeConverter[Timestamp, Timestamp, Any] {
-    override def toCatalystImpl(scalaValue: Timestamp): Long =
-      DateTimeUtils.fromJavaTimestamp(scalaValue)
-
-    override def toScala(catalystValue: Any): Timestamp =
-      if (catalystValue == null) null
-      else DateTimeUtils.toJavaTimestamp(catalystValue.asInstanceOf[Long])
-
-    override def toScalaImpl(row: InternalRow, column: Int): Timestamp =
-      DateTimeUtils.toJavaTimestamp(row.getLong(column))
-  }
-
-  private object InstantConverter extends CatalystTypeConverter[Instant, Instant, Any] {
-    override def toCatalystImpl(scalaValue: Instant): Long =
-      DateTimeUtils.instantToMicros(scalaValue)
-
-    override def toScala(catalystValue: Any): Instant =
-      if (catalystValue == null) null
-      else DateTimeUtils.microsToInstant(catalystValue.asInstanceOf[Long])
-
-    override def toScalaImpl(row: InternalRow, column: Int): Instant =
-      DateTimeUtils.microsToInstant(row.getLong(column))
-  }
-
-  private class DecimalConverter(dataType: DecimalType)
-    extends CatalystTypeConverter[Any, JavaBigDecimal, Decimal] {
-
-    private val nullOnOverflow = !SQLConf.get.ansiEnabled
-
-    override def toCatalystImpl(scalaValue: Any): Decimal = {
-      val decimal = scalaValue match {
-        case d: BigDecimal => Decimal(d)
-        case d: JavaBigDecimal => Decimal(d)
-        case d: JavaBigInteger => Decimal(d)
-        case d: Decimal => d
-        case other => throw new IllegalArgumentException(
-          s"The value (${other.toString}) of the type (${other.getClass.getCanonicalName}) "
-            + s"cannot be converted to ${dataType.catalogString}")
-      }
-      decimal.toPrecision(dataType.precision, dataType.scale, Decimal.ROUND_HALF_UP, nullOnOverflow)
-    }
-
-    override def toScala(catalystValue: Decimal): JavaBigDecimal = {
-      if (catalystValue == null) null
-      else catalystValue.toJavaBigDecimal
-    }
-
-    override def toScalaImpl(row: InternalRow, column: Int): JavaBigDecimal =
-      row.getDecimal(column, dataType.precision, dataType.scale).toJavaBigDecimal
-  }
-
-  private abstract class PrimitiveConverter[T] extends CatalystTypeConverter[T, Any, Any] {
-    final override def toScala(catalystValue: Any): Any = catalystValue
-
-    final override def toCatalystImpl(scalaValue: T): Any = scalaValue
-  }
-
-  private object BooleanConverter extends PrimitiveConverter[Boolean] {
-    override def toScalaImpl(row: InternalRow, column: Int): Boolean = row.getBoolean(column)
-  }
-
-  private object ByteConverter extends PrimitiveConverter[Byte] {
-    override def toScalaImpl(row: InternalRow, column: Int): Byte = row.getByte(column)
-  }
-
-  private object ShortConverter extends PrimitiveConverter[Short] {
-    override def toScalaImpl(row: InternalRow, column: Int): Short = row.getShort(column)
-  }
-
-  private object IntConverter extends PrimitiveConverter[Int] {
-    override def toScalaImpl(row: InternalRow, column: Int): Int = row.getInt(column)
-  }
-
-  private object LongConverter extends PrimitiveConverter[Long] {
-    override def toScalaImpl(row: InternalRow, column: Int): Long = row.getLong(column)
-  }
-
-  private object FloatConverter extends PrimitiveConverter[Float] {
-    override def toScalaImpl(row: InternalRow, column: Int): Float = row.getFloat(column)
-  }
-
-  private object DoubleConverter extends PrimitiveConverter[Double] {
-    override def toScalaImpl(row: InternalRow, column: Int): Double = row.getDouble(column)
-  }
-
-  /**
-   * Creates a converter function that will convert Scala objects to the specified Catalyst type.
-   * Typical use case would be converting a collection of rows that have the same schema. You will
-   * call this function once to get a converter, and apply it to every row.
-   */
-  def createToCatalystConverter(dataType: DataType): Any => Any = {
-    if (isPrimitive(dataType)) {
-      // Although the `else` branch here is capable of handling inbound conversion of primitives,
-      // we add some special-case handling for those types here. The motivation for this relates to
-      // Java method invocation costs: if we have rows that consist entirely of primitive columns,
-      // then returning the same conversion function for all of the columns means that the call site
-      // will be monomorphic instead of polymorphic. In microbenchmarks, this actually resulted in
-      // a measurable performance impact. Note that this optimization will be unnecessary if we
-      // use code generation to construct Scala Row -> Catalyst Row converters.
-      def convert(maybeScalaValue: Any): Any = {
-        maybeScalaValue match {
-          case option: Option[Any] =>
-            option.orNull
-          case _ =>
-            maybeScalaValue
-        }
-      }
-
-      convert
-    } else {
-      getConverterForType(dataType).toCatalyst
-    }
-  }
-
-  /**
-   * Creates a converter function that will convert Catalyst types to Scala type.
-   * Typical use case would be converting a collection of rows that have the same schema. You will
-   * call this function once to get a converter, and apply it to every row.
-   */
-  def createToScalaConverter(dataType: DataType): Any => Any = {
-    if (isPrimitive(dataType)) {
-      identity
-    } else {
-      getConverterForType(dataType).toScala
-    }
-  }
-
-  /**
-   * Converts Scala objects to Catalyst rows / types.
-   *
-   * Note: This should be called before do evaluation on Row
-   * (It does not support UDT)
-   * This is used to create an RDD or test results with correct types for Catalyst.
-   */
-  def convertToCatalyst(a: Any): Any = a match {
-    case s: String => StringConverter.toCatalyst(s)
-    case d: Date => DateConverter.toCatalyst(d)
-    case ld: LocalDate => LocalDateConverter.toCatalyst(ld)
-    case t: Timestamp => TimestampConverter.toCatalyst(t)
-    case i: Instant => InstantConverter.toCatalyst(i)
-    case d: BigDecimal => new DecimalConverter(DecimalType(d.precision, d.scale)).toCatalyst(d)
-    case d: JavaBigDecimal => new DecimalConverter(DecimalType(d.precision, d.scale)).toCatalyst(d)
-    case seq: Seq[Any] => new GenericArrayData(seq.map(convertToCatalyst).toArray)
-    case r: Row => InternalRow(r.toSeq.map(convertToCatalyst): _*)
-    case arr: Array[Any] => new GenericArrayData(arr.map(convertToCatalyst))
-    case map: Map[_, _] =>
-      ArrayBasedMapData(
-        map,
-        (key: Any) => convertToCatalyst(key),
-        (value: Any) => convertToCatalyst(value))
-    case other => other
-  }
-
-  /**
-   * Converts Catalyst types used internally in rows to standard Scala types
-   * This method is slow, and for batch conversion you should be using converter
-   * produced by createToScalaConverter.
-   */
-  def convertToScala(catalystValue: Any, dataType: DataType): Any = {
-    createToScalaConverter(dataType)(catalystValue)
-  }
-}
diff --git a/core/src/main/scala/org/jetbrains/kotlinx/spark/extensions/DemoCaseClass.scala b/core/src/main/scala/org/jetbrains/kotlinx/spark/extensions/DemoCaseClass.scala
deleted file mode 100644
index eb5a1a47..00000000
--- a/core/src/main/scala/org/jetbrains/kotlinx/spark/extensions/DemoCaseClass.scala
+++ /dev/null
@@ -1,3 +0,0 @@
-package org.jetbrains.kotlinx.spark.extensions
-
-case class DemoCaseClass[T](a: Int, b: T)
diff --git a/examples/build.gradle.kts b/examples/build.gradle.kts
index 90f45fe4..8683926a 100644
--- a/examples/build.gradle.kts
+++ b/examples/build.gradle.kts
@@ -1,26 +1,40 @@
 import org.jetbrains.kotlin.gradle.tasks.KotlinCompile
 
 plugins {
-    kotlin
-    idea
+    // Needs to be installed in the local maven repository or have the bootstrap jar on the classpath
+    id("org.jetbrains.kotlinx.spark.api")
+    java
+    kotlin("jvm")
+    kotlin("plugin.noarg") version Versions.kotlin
+}
+
+noArg {
+    annotation("org.jetbrains.kotlinx.spark.examples.NoArg")
+}
+
+kotlinSparkApi {
+    enabled = true
+    sparkifyAnnotationFqNames = listOf(
+        "org.jetbrains.kotlinx.spark.api.plugin.annotations.Sparkify",
+    )
 }
 
 group = Versions.groupID
 version = Versions.project
 
 repositories {
+    mavenLocal()
     mavenCentral()
 }
 
 dependencies {
-
-    with(Projects) {
+    Projects {
         implementation(
             kotlinSparkApi,
         )
     }
 
-    with(Dependencies) {
+    Dependencies {
 
         // https://github.com/FasterXML/jackson-bom/issues/52
         if (Versions.spark == "3.3.1") implementation(jacksonDatabind)
@@ -31,14 +45,12 @@ dependencies {
             sparkStreaming,
             sparkStreamingKafka,
         )
-
     }
 }
 
 kotlin {
+    jvmToolchain(8)
     jvmToolchain {
-        languageVersion.set(
-            JavaLanguageVersion.of(Versions.jvmTarget)
-        )
+        languageVersion = JavaLanguageVersion.of(Versions.jvmTarget)
     }
-}
+}
\ No newline at end of file
diff --git a/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/UDFs.kt b/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/UDFs.kt
index 4b2e497e..455bf267 100644
--- a/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/UDFs.kt
+++ b/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/UDFs.kt
@@ -270,10 +270,10 @@ private object MyAverage : Aggregator<Employee, Average, Double>() {
     override fun finish(reduction: Average): Double = reduction.sum.toDouble() / reduction.count
 
     // Specifies the Encoder for the intermediate value type
-    override fun bufferEncoder(): Encoder<Average> = encoder()
+    override fun bufferEncoder(): Encoder<Average> = kotlinEncoderFor()
 
     // Specifies the Encoder for the final output value type
-    override fun outputEncoder(): Encoder<Double> = encoder()
+    override fun outputEncoder(): Encoder<Double> = kotlinEncoderFor()
 
 }
 
diff --git a/gradle-plugin/build.gradle.kts b/gradle-plugin/build.gradle.kts
new file mode 100644
index 00000000..fb77282e
--- /dev/null
+++ b/gradle-plugin/build.gradle.kts
@@ -0,0 +1,77 @@
+@file:Suppress("UnstableApiUsage")
+
+plugins {
+    `java-gradle-plugin`
+    kotlin
+    buildconfig
+    signing
+    gradlePublishPlugin
+}
+
+group = Versions.groupID
+version = Versions.project
+
+publishing {
+    repositories {
+        maven {
+            name = "localPluginRepository"
+            url = uri("~/.m2/repository")
+        }
+    }
+}
+
+gradlePlugin {
+    website = "https://github.com/Kotlin/kotlin-spark-api"
+    vcsUrl = "https://github.com/Kotlin/kotlin-spark-api"
+
+    plugins.create("kotlin-spark-api") {
+        id = "${Versions.groupID}.api"
+        displayName = "Kotlin Spark API (Gradle) Compiler Plugin"
+        description = "TODO"
+        tags = setOf("kotlin", "spark", "compiler", "gradle", "Sparkify", "columnName")
+        implementationClass = "${Versions.groupID}.api.gradlePlugin.SparkKotlinCompilerGradlePlugin"
+    }
+}
+
+repositories {
+    mavenCentral()
+    maven("https://maven.pkg.jetbrains.space/kotlin/p/kotlin/bootstrap")
+}
+
+dependencies {
+    Dependencies {
+        compileOnly(
+            kotlinStdLib,
+            kotlinGradlePlugin,
+            gradleApi(),
+            gradleKotlinDsl()
+        )
+    }
+}
+
+kotlin {
+    jvmToolchain {
+        languageVersion = JavaLanguageVersion.of(Versions.jvmTarget)
+    }
+}
+
+/**
+ * Copies the built jar file to the gradle/bootstraps directory.
+ * This allows the project to use the gradle plugin without mavenLocal.
+ */
+val updateBootstrapVersion by tasks.creating(Copy::class) {
+    group = "build"
+    dependsOn(tasks.jar)
+
+    val jarFile = tasks.jar.get().outputs.files.files.single {
+        it.extension == "jar" && it.name.startsWith("gradle-plugin")
+    }
+    from(jarFile)
+    rename { "gradle-plugin.jar" }
+    into(project.rootDir.resolve("gradle/bootstraps"))
+    outputs.upToDateWhen { false }
+}
+
+tasks.build {
+    finalizedBy(updateBootstrapVersion)
+}
diff --git a/gradle-plugin/src/main/kotlin/org/jetbrains/kotlinx/spark/api/gradlePlugin/SparkKotlinCompilerExtension.kt b/gradle-plugin/src/main/kotlin/org/jetbrains/kotlinx/spark/api/gradlePlugin/SparkKotlinCompilerExtension.kt
new file mode 100644
index 00000000..d8dd2661
--- /dev/null
+++ b/gradle-plugin/src/main/kotlin/org/jetbrains/kotlinx/spark/api/gradlePlugin/SparkKotlinCompilerExtension.kt
@@ -0,0 +1,31 @@
+package org.jetbrains.kotlinx.spark.api.gradlePlugin
+
+import org.gradle.api.Project
+import org.gradle.api.file.DirectoryProperty
+import org.gradle.api.provider.ListProperty
+import org.gradle.api.provider.Property
+import org.jetbrains.kotlinx.spark.api.Artifacts
+import javax.inject.Inject
+
+abstract class SparkKotlinCompilerExtension @Inject constructor(project: Project) {
+
+    val enabled: Property<Boolean> = project
+        .objects
+        .property(Boolean::class.javaObjectType)
+        .convention(true)
+
+    val sparkifyAnnotationFqNames: ListProperty<String> = project
+        .objects
+        .listProperty(String::class.java)
+        .convention(listOf(Artifacts.defaultSparkifyFqName))
+
+    val columnNameAnnotationFqNames: ListProperty<String> = project
+        .objects
+        .listProperty(String::class.java)
+        .convention(listOf(Artifacts.defaultColumnNameFqName))
+
+    val outputDir: DirectoryProperty = project
+        .objects
+        .directoryProperty()
+        .convention(project.layout.buildDirectory.dir("generated/sources/sparkKotlinCompilerPlugin"))
+}
\ No newline at end of file
diff --git a/gradle-plugin/src/main/kotlin/org/jetbrains/kotlinx/spark/api/gradlePlugin/SparkKotlinCompilerGradlePlugin.kt b/gradle-plugin/src/main/kotlin/org/jetbrains/kotlinx/spark/api/gradlePlugin/SparkKotlinCompilerGradlePlugin.kt
new file mode 100644
index 00000000..fa3c77c5
--- /dev/null
+++ b/gradle-plugin/src/main/kotlin/org/jetbrains/kotlinx/spark/api/gradlePlugin/SparkKotlinCompilerGradlePlugin.kt
@@ -0,0 +1,67 @@
+package org.jetbrains.kotlinx.spark.api.gradlePlugin
+
+import org.gradle.api.Project
+import org.gradle.api.provider.Provider
+import org.gradle.kotlin.dsl.findByType
+import org.jetbrains.kotlin.gradle.dsl.KotlinJvmProjectExtension
+import org.jetbrains.kotlin.gradle.plugin.KotlinCompilation
+import org.jetbrains.kotlin.gradle.plugin.KotlinCompilerPluginSupportPlugin
+import org.jetbrains.kotlin.gradle.plugin.SubpluginArtifact
+import org.jetbrains.kotlin.gradle.plugin.SubpluginOption
+import org.jetbrains.kotlinx.spark.api.Artifacts
+
+class SparkKotlinCompilerGradlePlugin : KotlinCompilerPluginSupportPlugin {
+
+    override fun apply(target: Project) {
+        target.extensions.create("kotlinSparkApi", SparkKotlinCompilerExtension::class.java, target)
+
+        target.afterEvaluate {
+            it.extensions.findByType<KotlinJvmProjectExtension>()?.apply {
+                compilerOptions {
+                    // Make sure the parameters of data classes are visible to scala
+//                    javaParameters.set(true)
+
+                    // Avoid NotSerializableException by making lambdas serializable
+                    freeCompilerArgs.add("-Xlambdas=class")
+                }
+            }
+        }
+    }
+
+    override fun applyToCompilation(kotlinCompilation: KotlinCompilation<*>): Provider<List<SubpluginOption>> {
+        val target = kotlinCompilation.target.name
+        val sourceSetName = kotlinCompilation.defaultSourceSet.name
+
+        val project = kotlinCompilation.target.project
+        val extension = project.extensions.getByType(SparkKotlinCompilerExtension::class.java)
+
+        val enabled = extension.enabled.get()
+        val sparkifyAnnotationFqNames = extension.sparkifyAnnotationFqNames.get()
+        val columnNameAnnotationFqNames = extension.columnNameAnnotationFqNames.get()
+
+        val outputDir = extension.outputDir.get().dir("$target/$sourceSetName/kotlin")
+        kotlinCompilation.defaultSourceSet.kotlin.srcDir(outputDir.asFile)
+
+        val provider = project.provider {
+            listOf(
+                SubpluginOption(key = "enabled", value = enabled.toString()),
+                SubpluginOption(key = "sparkifyAnnotationFqNames", value = sparkifyAnnotationFqNames.joinToString()),
+                SubpluginOption(key = "columnNameAnnotationFqNames", value = columnNameAnnotationFqNames.joinToString()),
+            )
+        }
+        return provider
+    }
+
+    override fun getCompilerPluginId() = Artifacts.compilerPluginId
+
+    override fun getPluginArtifact(): SubpluginArtifact =
+        SubpluginArtifact(
+            groupId = Artifacts.groupId,
+            artifactId = Artifacts.compilerPluginArtifactId,
+            version = Artifacts.projectVersion,
+        )
+
+    override fun isApplicable(kotlinCompilation: KotlinCompilation<*>): Boolean = true
+}
+
+
diff --git a/gradle.properties b/gradle.properties
index bcfebf83..f577604f 100644
--- a/gradle.properties
+++ b/gradle.properties
@@ -2,16 +2,15 @@ kotlin.daemon.jvmargs=-Xmx8g
 org.gradle.jvmargs=-Xmx8g -XX:MaxMetaspaceSize=1g -XX:+HeapDumpOnOutOfMemoryError -Dfile.encoding=UTF-8
 mavenCentralUsername=dummy
 mavenCentralPassword=dummy
-
 GROUP=org.jetbrains.kotlinx.spark
-
 # Controls the spark and scala version for the entire project
 # can also be defined like ./gradlew -Pspark=X.X.X -Pscala=X.X.X build
-spark=3.3.2
-scala=2.13.10
-# scala=2.12.17
-skipScalaTuplesInKotlin=false
-
+spark=3.5.1
+#spark=3.4.2
+scala=2.13.13
+#scala=2.12.19
+skipScalaOnlyDependent=false
+sparkConnect=false
 org.gradle.caching=true
 org.gradle.parallel=false
 #kotlin.incremental.useClasspathSnapshot=true
diff --git a/gradle/bootstraps/compiler-plugin.jar b/gradle/bootstraps/compiler-plugin.jar
new file mode 100644
index 00000000..6de5e469
Binary files /dev/null and b/gradle/bootstraps/compiler-plugin.jar differ
diff --git a/gradle/bootstraps/gradle-plugin.jar b/gradle/bootstraps/gradle-plugin.jar
new file mode 100644
index 00000000..740a990d
Binary files /dev/null and b/gradle/bootstraps/gradle-plugin.jar differ
diff --git a/gradle/wrapper/gradle-wrapper.properties b/gradle/wrapper/gradle-wrapper.properties
index 070cb702..a5952066 100644
--- a/gradle/wrapper/gradle-wrapper.properties
+++ b/gradle/wrapper/gradle-wrapper.properties
@@ -1,5 +1,5 @@
 distributionBase=GRADLE_USER_HOME
 distributionPath=wrapper/dists
-distributionUrl=https\://services.gradle.org/distributions/gradle-7.6-bin.zip
+distributionUrl=https\://services.gradle.org/distributions/gradle-8.5-bin.zip
 zipStoreBase=GRADLE_USER_HOME
 zipStorePath=wrapper/dists
diff --git a/gradlew_all_versions b/gradlew_all_versions
index 19cab96e..98e9430e 100755
--- a/gradlew_all_versions
+++ b/gradlew_all_versions
@@ -5,10 +5,10 @@ set -euo pipefail
 # but now like `./gradlew_all_versions arguments`.
 
 DRY_RUN=${DRY_RUN:-false}
-SCALA2_12VERSION="2.12.16"
-SCALA2_13VERSION="2.13.8"
-SparkVersionsForBothScalaVersions=("3.3.0" "3.2.1" "3.2.0")
-SparkVersionsForScala2_12=("3.1.3" "3.1.2" "3.1.1" "3.1.0" "3.0.3" "3.0.2" "3.0.1" "3.0.0")
+SCALA2_12VERSION="2.12.19"
+SCALA2_13VERSION="2.13.13"
+SparkVersionsForBothScalaVersions=("3.4.2" "3.5.1")
+SparkVersionsForScala2_12=()
 
 echo Running for "$(expr ${#SparkVersionsForBothScalaVersions[@]} \* 2 + ${#SparkVersionsForScala2_12[@]}) versions of the library."
 
@@ -19,33 +19,37 @@ fi
 
 ARGS=("$@")
 execute() {
-  echo "running ./gradlew -Pspark=$SPARK -Pscala=$SCALA -PskipScalaTuplesInKotlin=$SKIP_SCALA_TUPLES -PenforceCleanJCP=true ${ARGS[*]}"
+  echo "running ./gradlew -Pspark=$SPARK -Pscala=$SCALA -PskipScalaOnlyDependent=$SKIP_SCALA_TUPLES -PenforceCleanJCP=true ${ARGS[*]}"
   if [ "$DRY_RUN" = false ]; then
-    ./gradlew -Pspark="$SPARK" -Pscala="$SCALA" -PskipScalaTuplesInKotlin="$SKIP_SCALA_TUPLES" "${ARGS[@]}"
+    ./gradlew -Pspark="$SPARK" -Pscala="$SCALA" -PskipScalaOnlyDependent="$SKIP_SCALA_TUPLES" "${ARGS[@]}"
   fi
 }
 
-SCALA="$SCALA2_12VERSION"
+#SCALA="$SCALA2_12VERSION"
 SKIP_SCALA_TUPLES=false
-for spark in "${SparkVersionsForScala2_12[@]}"; do
-  SPARK="$spark"
-  execute
-  SKIP_SCALA_TUPLES=true
-done
+#for spark in "${SparkVersionsForScala2_12[@]}"; do
+#  SPARK="$spark"
+#  execute
+#  SKIP_SCALA_TUPLES=true
+#done
 
 
 execute_for_both_scala_versions() {
   for spark in "${SparkVersionsForBothScalaVersions[@]}"; do
     SPARK="$spark"
     execute
-    SKIP_SCALA_TUPLES=true
+    if [ SPARK != "${SparkVersionsForBothScalaVersions[0]}" ]; then
+      SKIP_SCALA_TUPLES=true
+    else
+      SKIP_SCALA_TUPLES=false
+    fi
   done
 }
 SCALA="$SCALA2_12VERSION"
 execute_for_both_scala_versions
 
 SCALA="$SCALA2_13VERSION"
-SKIP_SCALA_TUPLES=false
+#SKIP_SCALA_TUPLES=false
 execute_for_both_scala_versions
 
 
diff --git a/jupyter/build.gradle.kts b/jupyter/build.gradle.kts
index bac43fb3..fb5e90de 100644
--- a/jupyter/build.gradle.kts
+++ b/jupyter/build.gradle.kts
@@ -1,10 +1,9 @@
-@file:Suppress("UnstableApiUsage", "NOTHING_TO_INLINE")
+@file:Suppress("UnstableApiUsage")
 
 import com.igormaznitsa.jcp.gradle.JcpTask
 import com.vanniktech.maven.publish.JavadocJar.Dokka
 import com.vanniktech.maven.publish.KotlinJvm
 import org.jetbrains.dokka.gradle.AbstractDokkaLeafTask
-import org.jetbrains.kotlin.gradle.tasks.KotlinCompile
 
 plugins {
     scala
@@ -37,13 +36,13 @@ tasks.processJupyterApiResources {
 }
 
 dependencies {
-    with(Projects) {
+    Projects {
         api(
             kotlinSparkApi,
         )
     }
 
-    with(Dependencies) {
+    Dependencies {
 
         // https://github.com/FasterXML/jackson-bom/issues/52
         if (Versions.spark == "3.3.1") implementation(jacksonDatabind)
@@ -74,10 +73,10 @@ dependencies {
 val kotlinMainSources = kotlin.sourceSets.main.get().kotlin.sourceDirectories
 
 val preprocessMain by tasks.creating(JcpTask::class) {
-    sources.set(kotlinMainSources)
-    clearTarget.set(true)
-    fileExtensions.set(listOf("kt"))
-    vars.set(Versions.versionMap)
+    sources = kotlinMainSources
+    clearTarget = true
+    fileExtensions = listOf("kt")
+    vars = Versions.versionMap
     outputs.upToDateWhen { target.get().exists() }
 }
 
@@ -110,10 +109,10 @@ tasks.compileKotlin {
 val kotlinTestSources = kotlin.sourceSets.test.get().kotlin.sourceDirectories
 
 val preprocessTest by tasks.creating(JcpTask::class) {
-    sources.set(kotlinTestSources)
-    clearTarget.set(true)
-    fileExtensions.set(listOf("java", "kt"))
-    vars.set(Versions.versionMap)
+    sources = kotlinTestSources
+    clearTarget = true
+    fileExtensions = listOf("java", "kt")
+    vars = Versions.versionMap
     outputs.upToDateWhen { target.get().exists() }
 }
 
@@ -143,9 +142,7 @@ tasks.compileTestKotlin {
 
 kotlin {
     jvmToolchain {
-        languageVersion.set(
-            JavaLanguageVersion.of(Versions.jupyterJvmTarget)
-        )
+        languageVersion = JavaLanguageVersion.of(Versions.jupyterJvmTarget)
     }
 }
 
diff --git a/jupyter/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/Integration.kt b/jupyter/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/Integration.kt
index 448751ae..30b9b27b 100644
--- a/jupyter/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/Integration.kt
+++ b/jupyter/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/Integration.kt
@@ -19,21 +19,43 @@
  */
 package org.jetbrains.kotlinx.spark.api.jupyter
 
-import kotlinx.serialization.Serializable
-import kotlinx.serialization.json.*
+import org.apache.spark.api.java.JavaDoubleRDD
+import org.apache.spark.api.java.JavaPairRDD
+import org.apache.spark.api.java.JavaRDD
 import org.apache.spark.api.java.JavaRDDLike
 import org.apache.spark.rdd.RDD
 import org.apache.spark.sql.Dataset
 import org.intellij.lang.annotations.Language
-import org.jetbrains.kotlinx.jupyter.api.*
+import org.jetbrains.kotlinx.jupyter.api.Code
+import org.jetbrains.kotlinx.jupyter.api.FieldValue
+import org.jetbrains.kotlinx.jupyter.api.KotlinKernelHost
+import org.jetbrains.kotlinx.jupyter.api.MimeTypedResult
+import org.jetbrains.kotlinx.jupyter.api.Notebook
+import org.jetbrains.kotlinx.jupyter.api.VariableDeclaration
+import org.jetbrains.kotlinx.jupyter.api.createRendererByCompileTimeType
+import org.jetbrains.kotlinx.jupyter.api.declare
 import org.jetbrains.kotlinx.jupyter.api.libraries.JupyterIntegration
+import org.jetbrains.kotlinx.jupyter.api.textResult
+import org.jetbrains.kotlinx.spark.api.SparkSession
 import org.jetbrains.kotlinx.spark.api.jupyter.Properties.Companion.displayLimitName
 import org.jetbrains.kotlinx.spark.api.jupyter.Properties.Companion.displayTruncateName
 import org.jetbrains.kotlinx.spark.api.jupyter.Properties.Companion.scalaName
 import org.jetbrains.kotlinx.spark.api.jupyter.Properties.Companion.sparkName
 import org.jetbrains.kotlinx.spark.api.jupyter.Properties.Companion.sparkPropertiesName
 import org.jetbrains.kotlinx.spark.api.jupyter.Properties.Companion.versionName
-import kotlin.reflect.KProperty1
+import org.jetbrains.kotlinx.spark.api.kotlinEncoderFor
+import org.jetbrains.kotlinx.spark.api.plugin.annotations.ColumnName
+import org.jetbrains.kotlinx.spark.api.plugin.annotations.Sparkify
+import scala.Tuple2
+import kotlin.reflect.KClass
+import kotlin.reflect.KMutableProperty
+import kotlin.reflect.full.createType
+import kotlin.reflect.full.findAnnotation
+import kotlin.reflect.full.isSubtypeOf
+import kotlin.reflect.full.memberFunctions
+import kotlin.reflect.full.memberProperties
+import kotlin.reflect.full.primaryConstructor
+import kotlin.reflect.full.valueParameters
 import kotlin.reflect.typeOf
 
 
@@ -46,9 +68,6 @@ abstract class Integration(private val notebook: Notebook, private val options:
     protected val sparkVersion = /*$"\""+spark+"\""$*/ /*-*/ ""
     protected val version = /*$"\""+version+"\""$*/ /*-*/ ""
 
-    protected val displayLimitOld = "DISPLAY_LIMIT"
-    protected val displayTruncateOld = "DISPLAY_TRUNCATE"
-
     protected val properties: Properties
         get() = notebook
             .variablesState[sparkPropertiesName]!!
@@ -101,6 +120,7 @@ abstract class Integration(private val notebook: Notebook, private val options:
     )
 
     open val imports: Array<String> = arrayOf(
+        "org.jetbrains.kotlinx.spark.api.plugin.annotations.*",
         "org.jetbrains.kotlinx.spark.api.*",
         "org.jetbrains.kotlinx.spark.api.tuples.*",
         *(1..22).map { "scala.Tuple$it" }.toTypedArray(),
@@ -116,6 +136,9 @@ abstract class Integration(private val notebook: Notebook, private val options:
         "org.apache.spark.streaming.*",
     )
 
+    // Needs to be set by integration
+    var spark: SparkSession? = null
+
     override fun Builder.onLoaded() {
         dependencies(*dependencies)
         import(*imports)
@@ -135,35 +158,15 @@ abstract class Integration(private val notebook: Notebook, private val options:
                 )
             )
 
-            @Language("kts")
-            val _0 = execute(
-                """
-                @Deprecated("Use ${displayLimitName}=${properties.displayLimit} in %use magic or ${sparkPropertiesName}.${displayLimitName} = ${properties.displayLimit} instead", ReplaceWith("${sparkPropertiesName}.${displayLimitName}"))
-                var $displayLimitOld: Int
-                    get() = ${sparkPropertiesName}.${displayLimitName}
-                    set(value) {
-                        println("$displayLimitOld is deprecated: Use ${sparkPropertiesName}.${displayLimitName} instead")
-                        ${sparkPropertiesName}.${displayLimitName} = value
-                    }
-                
-                @Deprecated("Use ${displayTruncateName}=${properties.displayTruncate} in %use magic or ${sparkPropertiesName}.${displayTruncateName} = ${properties.displayTruncate} instead", ReplaceWith("${sparkPropertiesName}.${displayTruncateName}"))
-                var $displayTruncateOld: Int
-                    get() = ${sparkPropertiesName}.${displayTruncateName}
-                    set(value) {
-                        println("$displayTruncateOld is deprecated: Use ${sparkPropertiesName}.${displayTruncateName} instead")
-                        ${sparkPropertiesName}.${displayTruncateName} = value
-                    }
-            """.trimIndent()
-            )
-
             onLoaded()
         }
 
         beforeCellExecution {
-            if (scalaCompatVersion.toDouble() >= 2.13)
+            if (scalaCompatVersion.toDouble() >= 2.13) {
                 execute("scala.`Console\$`.`MODULE\$`.setOutDirect(System.out)")
-            else
+            } else {
                 execute("""scala.Console.setOut(System.out)""")
+            }
 
             beforeCellExecution()
         }
@@ -180,27 +183,123 @@ abstract class Integration(private val notebook: Notebook, private val options:
             onShutdown()
         }
 
+        onClassAnnotation<Sparkify> {
+            for (klass in it) {
+                if (klass.isData) {
+                    execute(generateSparkifyClass(klass))
+                }
+            }
+        }
 
         // Render Dataset
         render<Dataset<*>> {
-            with(properties) {
-                HTML(it.toHtml(limit = displayLimit, truncate = displayTruncate))
-            }
+            renderDataset(it)
         }
 
-        render<RDD<*>> {
-            with(properties) {
-                HTML(it.toJavaRDD().toHtml(limit = displayLimit, truncate = displayTruncate))
+        // using compile time KType, convert this JavaRDDLike to Dataset and render it
+        notebook.renderersProcessor.registerWithoutOptimizing(
+            createRendererByCompileTimeType<JavaRDDLike<*, *>> {
+                if (spark == null) return@createRendererByCompileTimeType it.value.toString()
+
+                val rdd = (it.value as JavaRDDLike<*, *>).rdd()
+                val type = when {
+                    it.type.isSubtypeOf(typeOf<JavaDoubleRDD>()) ->
+                        typeOf<Double>()
+
+                    it.type.isSubtypeOf(typeOf<JavaPairRDD<*, *>>()) ->
+                        Tuple2::class.createType(
+                            listOf(
+                                it.type.arguments.first(),
+                                it.type.arguments.last(),
+                            )
+                        )
+
+                    it.type.isSubtypeOf(typeOf<JavaRDD<*>>()) ->
+                        it.type.arguments.first().type!!
+
+                    else -> it.type.arguments.first().type!!
+                }
+                val ds = spark!!.createDataset(rdd, kotlinEncoderFor(type))
+                renderDataset(ds)
             }
-        }
+        )
 
-        render<JavaRDDLike<*, *>> {
-            with(properties) {
-                HTML(it.toHtml(limit = displayLimit, truncate = displayTruncate))
+        // using compile time KType, convert this RDD to Dataset and render it
+        notebook.renderersProcessor.registerWithoutOptimizing(
+            createRendererByCompileTimeType<RDD<*>> {
+                if (spark == null) return@createRendererByCompileTimeType it.value.toString()
+
+                val rdd = it.value as RDD<*>
+                val type = it.type.arguments.first().type!!
+                val ds = spark!!.createDataset(rdd, kotlinEncoderFor(type))
+                renderDataset(ds)
             }
+        )
 
+        onLoadedAlsoDo()
+    }
+
+    private fun renderDataset(it: Dataset<*>): MimeTypedResult =
+        with(properties) {
+//            val showFunction = Dataset::class
+//                .memberFunctions
+//                .firstOrNull { it.name == "showString" && it.valueParameters.size == 3 }
+//
+//            textResult(
+//                if (showFunction != null) {
+//                    showFunction.call(it, displayLimit, displayTruncate, false) as String
+//                } else {
+//                    // if the function cannot be called, make sure it will call println instead
+//                    it.show(displayLimit, displayTruncate)
+//                    ""
+//                }
+//            )
+
+            // don't actually render, instead use `show()`, which calls System.out
+            it.show(displayLimit, displayTruncate)
+            textResult("")
         }
 
-        onLoadedAlsoDo()
+
+    // TODO wip
+    private fun generateSparkifyClass(klass: KClass<*>): Code {
+//        val name = "`${klass.simpleName!!}${'$'}Generated`"
+        val name = klass.simpleName
+        val constructorArgs = klass.primaryConstructor!!.parameters
+        val visibility = klass.visibility?.name?.lowercase() ?: ""
+        val memberProperties = klass.memberProperties
+
+        val properties = constructorArgs.associateWith {
+            memberProperties.first { it.name == it.name }
+        }
+
+        val constructorParamsCode = properties.entries.joinToString("\n") { (param, prop) ->
+            // TODO check override
+            if (param.isOptional) TODO()
+            val modifier = if (prop is KMutableProperty<*>) "var" else "val"
+            val paramVisiblity = prop.visibility?.name?.lowercase() ?: ""
+            val columnName = param.findAnnotation<ColumnName>()?.name ?: param.name!!
+
+            "|     @get:kotlin.jvm.JvmName(\"$columnName\") $paramVisiblity $modifier ${param.name}: ${param.type},"
+        }
+
+        val productElementWhenParamsCode = properties.entries.joinToString("\n") { (param, _) ->
+            "|        ${param.index} -> this.${param.name}"
+        }
+
+        @Language("kotlin")
+        val code = """
+            |$visibility data class $name(
+            $constructorParamsCode
+            |): scala.Product, java.io.Serializable {
+            |    override fun canEqual(that: Any?): Boolean = that is $name
+            |    override fun productArity(): Int = ${constructorArgs.size}
+            |    override fun productElement(n: Int): Any = when (n) {
+            $productElementWhenParamsCode
+            |        else -> throw IndexOutOfBoundsException()
+            |    }
+            |}
+        """.trimMargin()
+        return code
     }
 }
diff --git a/jupyter/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/SparkIntegration.kt b/jupyter/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/SparkIntegration.kt
index cc308116..0c4eb096 100644
--- a/jupyter/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/SparkIntegration.kt
+++ b/jupyter/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/SparkIntegration.kt
@@ -25,6 +25,7 @@ package org.jetbrains.kotlinx.spark.api.jupyter
 import org.intellij.lang.annotations.Language
 import org.jetbrains.kotlinx.jupyter.api.KotlinKernelHost
 import org.jetbrains.kotlinx.jupyter.api.Notebook
+import org.jetbrains.kotlinx.spark.api.SparkSession
 import org.jetbrains.kotlinx.spark.api.jupyter.Properties.Companion.appNameName
 import org.jetbrains.kotlinx.spark.api.jupyter.Properties.Companion.sparkMasterName
 
@@ -86,7 +87,7 @@ class SparkIntegration(notebook: Notebook, options: MutableMap<String, String?>)
             """
                 inline fun <reified T> dfOf(vararg arg: T): Dataset<Row> = spark.dfOf(*arg)""".trimIndent(),
             """
-                inline fun <reified T> emptyDataset(): Dataset<T> = spark.emptyDataset(encoder<T>())""".trimIndent(),
+                inline fun <reified T> emptyDataset(): Dataset<T> = spark.emptyDataset(kotlinEncoderFor<T>())""".trimIndent(),
             """
                 inline fun <reified T> dfOf(colNames: Array<String>, vararg arg: T): Dataset<Row> = spark.dfOf(colNames, *arg)""".trimIndent(),
             """
@@ -108,6 +109,8 @@ class SparkIntegration(notebook: Notebook, options: MutableMap<String, String?>)
             """
                 inline fun <RETURN, reified NAMED_UDF : NamedUserDefinedFunction<RETURN, *>> UserDefinedFunction<RETURN, NAMED_UDF>.register(name: String): NAMED_UDF = spark.udf().register(name = name, udf = this)""".trimIndent(),
         ).map(::execute)
+
+        spark = execute("spark").value as SparkSession
     }
 
     override fun KotlinKernelHost.onShutdown() {
diff --git a/jupyter/src/test/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/JupyterTests.kt b/jupyter/src/test/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/JupyterTests.kt
index b82512b7..9368ebc4 100644
--- a/jupyter/src/test/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/JupyterTests.kt
+++ b/jupyter/src/test/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/JupyterTests.kt
@@ -31,16 +31,14 @@ import jupyter.kotlin.DependsOn
 import org.apache.spark.api.java.JavaSparkContext
 import org.apache.spark.streaming.api.java.JavaStreamingContext
 import org.intellij.lang.annotations.Language
-import org.jetbrains.kotlinx.jupyter.EvalRequestData
-import org.jetbrains.kotlinx.jupyter.MutableNotebook
-import org.jetbrains.kotlinx.jupyter.ReplForJupyter
-import org.jetbrains.kotlinx.jupyter.ReplForJupyterImpl
 import org.jetbrains.kotlinx.jupyter.api.Code
 import org.jetbrains.kotlinx.jupyter.api.MimeTypedResult
-import org.jetbrains.kotlinx.jupyter.libraries.EmptyResolutionInfoProvider
-import org.jetbrains.kotlinx.jupyter.repl.EvalResultEx
+import org.jetbrains.kotlinx.jupyter.api.MimeTypes
+import org.jetbrains.kotlinx.jupyter.libraries.createLibraryHttpUtil
+import org.jetbrains.kotlinx.jupyter.repl.EvalRequestData
+import org.jetbrains.kotlinx.jupyter.repl.ReplForJupyter
 import org.jetbrains.kotlinx.jupyter.repl.creating.createRepl
-import org.jetbrains.kotlinx.jupyter.testkit.JupyterReplTestCase
+import org.jetbrains.kotlinx.jupyter.repl.result.EvalResultEx
 import org.jetbrains.kotlinx.jupyter.testkit.ReplProvider
 import org.jetbrains.kotlinx.jupyter.util.PatternNameAcceptanceRule
 import org.jetbrains.kotlinx.spark.api.SparkSession
@@ -52,6 +50,7 @@ class JupyterTests : ShouldSpec({
 
     val replProvider = ReplProvider { classpath ->
         createRepl(
+            httpUtil = createLibraryHttpUtil(),
             scriptClasspath = classpath,
             isEmbedded = true,
         ).apply {
@@ -83,10 +82,11 @@ class JupyterTests : ShouldSpec({
 
     context("Jupyter") {
         withRepl {
+            exec("%trackExecution")
 
             should("Allow functions on local data classes") {
                 @Language("kts")
-                val klass = exec("""data class Test(val a: Int, val b: String)""")
+                val klass = exec("""@Sparkify data class Test(val a: Int, val b: String)""")
 
                 @Language("kts")
                 val ds = exec("""val ds = dsOf(Test(1, "hi"), Test(2, "something"))""")
@@ -110,9 +110,9 @@ class JupyterTests : ShouldSpec({
                 sc as? JavaSparkContext shouldNotBe null
             }
 
-            should("render Datasets") {
+            xshould("render Datasets") {
                 @Language("kts")
-                val html = execHtml(
+                val html = execForDisplayText(
                     """
                     val ds = listOf(1, 2, 3).toDS()
                     ds
@@ -126,9 +126,9 @@ class JupyterTests : ShouldSpec({
                 html shouldContain "3"
             }
 
-            should("render JavaRDDs") {
+            xshould("render JavaRDDs") {
                 @Language("kts")
-                val html = execHtml(
+                val html = execForDisplayText(
                     """
                     val rdd: JavaRDD<List<Int>> = listOf(
                         listOf(1, 2, 3), 
@@ -143,9 +143,9 @@ class JupyterTests : ShouldSpec({
                 html shouldContain "4, 5, 6"
             }
 
-            should("render JavaRDDs with Arrays") {
+            xshould("render JavaRDDs with Arrays") {
                 @Language("kts")
-                val html = execHtml(
+                val html = execForDisplayText(
                     """
                     val rdd: JavaRDD<IntArray> = rddOf(
                         intArrayOf(1, 2, 3), 
@@ -160,12 +160,12 @@ class JupyterTests : ShouldSpec({
                 html shouldContain "4, 5, 6"
             }
 
-            should("render JavaRDDs with custom class") {
+            xshould("render JavaRDDs with custom class") {
 
                 @Language("kts")
                 val klass = exec(
                     """
-                    data class Test(
+                    @Sparkify data class Test(
                         val longFirstName: String,
                         val second: LongArray,
                         val somethingSpecial: Map<Int, String>,
@@ -174,7 +174,7 @@ class JupyterTests : ShouldSpec({
                 )
 
                 @Language("kts")
-                val html = execHtml(
+                val html = execForDisplayText(
                     """
                     val rdd =
                         listOf(
@@ -185,29 +185,40 @@ class JupyterTests : ShouldSpec({
                     rdd
                     """.trimIndent()
                 )
-                html shouldContain "Test(longFirstName=aaaaaaaa..."
+                html shouldContain """
+                    +-------------+---------------+--------------------+
+                    |longFirstName|         second|    somethingSpecial|
+                    +-------------+---------------+--------------------+
+                    |    aaaaaaaaa|[1, 100000, 24]|{1 -> one, 2 -> two}|
+                    |    aaaaaaaaa|[1, 100000, 24]|{1 -> one, 2 -> two}|
+                    +-------------+---------------+--------------------+""".trimIndent()
             }
 
-            should("render JavaPairRDDs") {
+            xshould("render JavaPairRDDs") {
                 @Language("kts")
-                val html = execHtml(
+                val html = execForDisplayText(
                     """
                     val rdd: JavaPairRDD<Int, Int> = rddOf(
-                        c(1, 2).toTuple(),
-                        c(3, 4).toTuple(),
+                        t(1, 2),
+                        t(3, 4),
                     ).toJavaPairRDD()
                     rdd
                     """.trimIndent()
                 )
                 println(html)
 
-                html shouldContain "1, 2"
-                html shouldContain "3, 4"
+                html shouldContain """
+                    +---+---+
+                    | _1| _2|
+                    +---+---+
+                    |  1|  2|
+                    |  3|  4|
+                    +---+---+""".trimIndent()
             }
 
-            should("render JavaDoubleRDD") {
+            xshould("render JavaDoubleRDD") {
                 @Language("kts")
-                val html = execHtml(
+                val html = execForDisplayText(
                     """
                     val rdd: JavaDoubleRDD = rddOf(1.0, 2.0, 3.0, 4.0,).toJavaDoubleRDD()
                     rdd
@@ -221,9 +232,9 @@ class JupyterTests : ShouldSpec({
                 html shouldContain "4.0"
             }
 
-            should("render Scala RDD") {
+            xshould("render Scala RDD") {
                 @Language("kts")
-                val html = execHtml(
+                val html = execForDisplayText(
                     """
                     val rdd: RDD<List<Int>> = rddOf(
                         listOf(1, 2, 3), 
@@ -238,15 +249,15 @@ class JupyterTests : ShouldSpec({
                 html shouldContain "4, 5, 6"
             }
 
-            should("truncate dataset cells using properties") {
+            xshould("truncate dataset cells using properties") {
 
                 @Language("kts")
                 val oldTruncation = exec("""sparkProperties.displayTruncate""") as Int
 
                 @Language("kts")
-                val html = execHtml(
+                val html = execForDisplayText(
                     """
-                        data class Test(val a: String)
+                        @Sparkify data class Test(val a: String)
                         sparkProperties.displayTruncate = 3
                         dsOf(Test("aaaaaaaaaa"))
                     """.trimIndent()
@@ -255,19 +266,19 @@ class JupyterTests : ShouldSpec({
                 @Language("kts")
                 val restoreTruncation = exec("""sparkProperties.displayTruncate = $oldTruncation""")
 
-                html shouldContain "<td>aaa</td>"
-                html shouldNotContain "<td>aaaaaaaaaa</td>"
+                html shouldContain "aaa"
+                html shouldNotContain "aaaaaaaaaa"
             }
 
-            should("limit dataset rows using properties") {
+            xshould("limit dataset rows using properties") {
 
                 @Language("kts")
                 val oldLimit = exec("""sparkProperties.displayLimit""") as Int
 
                 @Language("kts")
-                val html = execHtml(
+                val html = execForDisplayText(
                     """
-                        data class Test(val a: String)
+                        @Sparkify data class Test(val a: String)
                         sparkProperties.displayLimit = 3
                         dsOf(Test("a"), Test("b"), Test("c"), Test("d"), Test("e"))
                     """.trimIndent()
@@ -276,20 +287,20 @@ class JupyterTests : ShouldSpec({
                 @Language("kts")
                 val restoreLimit = exec("""sparkProperties.displayLimit = $oldLimit""")
 
-                html shouldContain "<td>a</td>"
-                html shouldContain "<td>b</td>"
-                html shouldContain "<td>c</td>"
-                html shouldNotContain "<td>d</td>"
-                html shouldNotContain "<td>e</td>"
+                html shouldContain "a|"
+                html shouldContain "b|"
+                html shouldContain "c|"
+                html shouldNotContain "d|"
+                html shouldNotContain "e|"
             }
 
-            should("truncate rdd cells using properties") {
+            xshould("truncate rdd cells using properties") {
 
                 @Language("kts")
                 val oldTruncation = exec("""sparkProperties.displayTruncate""") as Int
 
                 @Language("kts")
-                val html = execHtml(
+                val html = execForDisplayText(
                     """
                         sparkProperties.displayTruncate = 3
                         rddOf("aaaaaaaaaa")
@@ -299,17 +310,17 @@ class JupyterTests : ShouldSpec({
                 @Language("kts")
                 val restoreTruncation = exec("""sparkProperties.displayTruncate = $oldTruncation""")
 
-                html shouldContain "<td>aaa</td>"
-                html shouldNotContain "<td>aaaaaaaaaa</td>"
+                html shouldContain "aaa"
+                html shouldNotContain "aaaaaaaaaa"
             }
 
-            should("limit rdd rows using properties") {
+            xshould("limit rdd rows using properties") {
 
                 @Language("kts")
                 val oldLimit = exec("""sparkProperties.displayLimit""") as Int
 
                 @Language("kts")
-                val html = execHtml(
+                val html = execForDisplayText(
                     """
                         sparkProperties.displayLimit = 3
                         rddOf("a", "b", "c", "d", "e")
@@ -319,11 +330,11 @@ class JupyterTests : ShouldSpec({
                 @Language("kts")
                 val restoreLimit = exec("""sparkProperties.displayLimit = $oldLimit""")
 
-                html shouldContain "<td>a</td>"
-                html shouldContain "<td>b</td>"
-                html shouldContain "<td>c</td>"
-                html shouldNotContain "<td>d</td>"
-                html shouldNotContain "<td>e</td>"
+                html shouldContain " a|"
+                html shouldContain " b|"
+                html shouldContain " c|"
+                html shouldNotContain " d|"
+                html shouldNotContain " e|"
             }
 
             @Language("kts")
@@ -335,6 +346,7 @@ class JupyterTests : ShouldSpec({
 class JupyterStreamingTests : ShouldSpec({
     val replProvider = ReplProvider { classpath ->
         createRepl(
+            httpUtil = createLibraryHttpUtil(),
             scriptClasspath = classpath,
             isEmbedded = true,
         ).apply {
@@ -364,7 +376,7 @@ class JupyterStreamingTests : ShouldSpec({
     fun createRepl(): ReplForJupyter = replProvider(scriptClasspath)
     suspend fun withRepl(action: suspend ReplForJupyter.() -> Unit): Unit = createRepl().action()
 
-    context("Jupyter") {
+    xcontext("Jupyter") {
         withRepl {
 
             // For when onInterrupt is implemented in the Jupyter kernel
@@ -391,7 +403,7 @@ class JupyterStreamingTests : ShouldSpec({
                 }
             }
 
-            xshould("stream") {
+            should("stream") {
 
                 @Language("kts")
                 val value = exec(
@@ -440,9 +452,7 @@ class JupyterStreamingTests : ShouldSpec({
 
 private fun ReplForJupyter.execEx(code: Code): EvalResultEx = evalEx(EvalRequestData(code))
 
-private fun ReplForJupyter.exec(code: Code): Any? = execEx(code).renderedValue
-
-private fun ReplForJupyter.execRaw(code: Code): Any? = execEx(code).rawValue
+private fun ReplForJupyter.exec(code: Code): Any? = (execEx(code) as? EvalResultEx.Success)?.renderedValue
 
 @JvmName("execTyped")
 private inline fun <reified T : Any> ReplForJupyter.exec(code: Code): T {
@@ -458,4 +468,11 @@ private fun ReplForJupyter.execHtml(code: Code): String {
     return html
 }
 
+private fun ReplForJupyter.execForDisplayText(code: Code): String {
+    val res = exec<MimeTypedResult>(code)
+    val text = res[MimeTypes.PLAIN_TEXT]
+    text.shouldNotBeNull()
+    return text
+}
+
 class Counter(@Volatile var value: Int) : Serializable
diff --git a/kotlin-spark-api/build.gradle.kts b/kotlin-spark-api/build.gradle.kts
index 2691836a..9e0097d7 100644
--- a/kotlin-spark-api/build.gradle.kts
+++ b/kotlin-spark-api/build.gradle.kts
@@ -1,4 +1,4 @@
-@file:Suppress("UnstableApiUsage", "NOTHING_TO_INLINE")
+@file:Suppress("UnstableApiUsage")
 
 import com.igormaznitsa.jcp.gradle.JcpTask
 import com.vanniktech.maven.publish.JavadocJar.Dokka
@@ -11,6 +11,7 @@ plugins {
     mavenPublishBase
     jcp
     idea
+    kotlinSparkApi // for @Sparkify
 }
 
 group = Versions.groupID
@@ -19,6 +20,7 @@ version = Versions.project
 
 repositories {
     mavenCentral()
+    mavenLocal()
 }
 
 tasks.withType<Test>().configureEach {
@@ -28,24 +30,27 @@ tasks.withType<Test>().configureEach {
 
 dependencies {
 
-    with(Projects) {
+    Projects {
         api(
-            core,
-            scalaTuplesInKotlin,
+            scalaHelpers,
+            scalaTuplesInKotlin
         )
     }
 
-    with(Dependencies) {
+    Dependencies {
 
         // https://github.com/FasterXML/jackson-bom/issues/52
         if (Versions.spark == "3.3.1") implementation(jacksonDatabind)
 
+        // if (Versions.sparkConnect) TODO("unsupported for now")
+
         implementation(
             kotlinStdLib,
             reflect,
             sparkSql,
             sparkStreaming,
             hadoopClient,
+            kotlinDateTime,
         )
 
         testImplementation(
@@ -66,10 +71,10 @@ dependencies {
 val kotlinMainSources = kotlin.sourceSets.main.get().kotlin.sourceDirectories
 
 val preprocessMain by tasks.creating(JcpTask::class) {
-    sources.set(kotlinMainSources)
-    clearTarget.set(true)
-    fileExtensions.set(listOf("kt"))
-    vars.set(Versions.versionMap)
+    sources = kotlinMainSources
+    clearTarget = true
+    fileExtensions = listOf("kt")
+    vars = Versions.versionMap
     outputs.upToDateWhen { target.get().exists() }
 }
 
@@ -105,10 +110,10 @@ tasks.compileKotlin {
 val kotlinTestSources = kotlin.sourceSets.test.get().kotlin.sourceDirectories
 
 val preprocessTest by tasks.creating(JcpTask::class) {
-    sources.set(kotlinTestSources)
-    clearTarget.set(true)
-    fileExtensions.set(listOf("kt"))
-    vars.set(Versions.versionMap)
+    sources = kotlinTestSources
+    clearTarget = true
+    fileExtensions = listOf("kt")
+    vars = Versions.versionMap
     outputs.upToDateWhen { target.get().exists() }
 }
 
@@ -141,9 +146,7 @@ tasks.compileTestKotlin {
 
 kotlin {
     jvmToolchain {
-        languageVersion.set(
-            JavaLanguageVersion.of(Versions.jvmTarget)
-        )
+        languageVersion = JavaLanguageVersion.of(Versions.jvmTarget)
     }
 }
 
@@ -158,8 +161,3 @@ tasks.withType<AbstractDokkaLeafTask> {
 mavenPublishing {
     configure(KotlinJvm(Dokka("dokkaHtml")))
 }
-
-
-
-
-
diff --git a/kotlin-spark-api/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Arities.kt b/kotlin-spark-api/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Arities.kt
deleted file mode 100644
index ed405f6a..00000000
--- a/kotlin-spark-api/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Arities.kt
+++ /dev/null
@@ -1,856 +0,0 @@
-/*-
- * =LICENSE=
- * Kotlin Spark API
- * ----------
- * Copyright (C) 2019 - 2020 JetBrains
- * ----------
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * 
- *      http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * =LICENSEEND=
- */
-
-/**
- * DEPRECATED: Use Scala tuples instead.
- *
- * Helper classes and functions to work with unnamed tuples we call Arities.
- * Arities are easier to work with in Kotlin than Scala Tuples since they are Kotlin data classes.
- * This means they can be destructured, copied, etc.
- * Finally, the Arities are Serializable, meaning they can be used inside RDDs and they can be broadcast.
- *
- * Example:
- * ```kotlin
- * // creation
- * val tuple: Arity3<Int, String, Double> = c(1, "test", 1.0)
- *
- * // addition
- * val newTuple: Arity5<Int, String, Double, Int, Int> = tuple + c(1, 2)
- *
- * // destructuring
- * val dataset: Dataset<Arity2<Int, Double>> = ...
- * dataset.map { (a: Int, b: Double) ->
- *    (a + b).toString()
- * }
- * ```
- */
-@file:Suppress("DEPRECATION")
-package org.jetbrains.kotlinx.spark.api
-
-import java.io.Serializable
-
-@Deprecated("Use Scala tuples instead.", ReplaceWith("Tuple1<T1>(_1)", "scala.Tuple1"))
-data class Arity1<T1>(val _1: T1): Serializable
-
-@Deprecated("Use Scala tuples instead.", ReplaceWith("Tuple2<T1, T2>(_1, _2)", "scala.Tuple2"))
-data class Arity2<T1, T2>(val _1: T1, val _2: T2): Serializable
-
-@Deprecated("Use Scala tuples instead.", ReplaceWith("Tuple3<T1, T2, T3>(_1, _2, _3)", "scala.Tuple3"))
-data class Arity3<T1, T2, T3>(val _1: T1, val _2: T2, val _3: T3): Serializable
-
-@Deprecated("Use Scala tuples instead.", ReplaceWith("Tuple4<T1, T2, T3, T4>(_1, _2, _3, _4)", "scala.Tuple4"))
-data class Arity4<T1, T2, T3, T4>(val _1: T1, val _2: T2, val _3: T3, val _4: T4): Serializable
-
-@Deprecated("Use Scala tuples instead.", ReplaceWith("Tuple5<T1, T2, T3, T4, T5>(_1, _2, _3, _4, _5)", "scala.Tuple5"))
-data class Arity5<T1, T2, T3, T4, T5>(val _1: T1, val _2: T2, val _3: T3, val _4: T4, val _5: T5): Serializable
-
-@Deprecated("Use Scala tuples instead.", ReplaceWith("Tuple6<T1, T2, T3, T4, T5, T6>(_1, _2, _3, _4, _5, _6)", "scala.Tuple6"))
-data class Arity6<T1, T2, T3, T4, T5, T6>(val _1: T1, val _2: T2, val _3: T3, val _4: T4, val _5: T5, val _6: T6): Serializable
-
-@Deprecated("Use Scala tuples instead.", ReplaceWith("Tuple7<T1, T2, T3, T4, T5, T6, T7>(_1, _2, _3, _4, _5, _6, _7)", "scala.Tuple7"))
-data class Arity7<T1, T2, T3, T4, T5, T6, T7>(val _1: T1, val _2: T2, val _3: T3, val _4: T4, val _5: T5, val _6: T6, val _7: T7): Serializable
-
-@Deprecated("Use Scala tuples instead.", ReplaceWith("Tuple8<T1, T2, T3, T4, T5, T6, T7, T8>(_1, _2, _3, _4, _5, _6, _7, _8)", "scala.Tuple8"))
-data class Arity8<T1, T2, T3, T4, T5, T6, T7, T8>(val _1: T1, val _2: T2, val _3: T3, val _4: T4, val _5: T5, val _6: T6, val _7: T7, val _8: T8): Serializable
-
-@Deprecated("Use Scala tuples instead.", ReplaceWith("Tuple9<T1, T2, T3, T4, T5, T6, T7, T8, T9>(_1, _2, _3, _4, _5, _6, _7, _8, _9)", "scala.Tuple9"))
-data class Arity9<T1, T2, T3, T4, T5, T6, T7, T8, T9>(val _1: T1, val _2: T2, val _3: T3, val _4: T4, val _5: T5, val _6: T6, val _7: T7, val _8: T8, val _9: T9): Serializable
-
-@Deprecated("Use Scala tuples instead.", ReplaceWith("Tuple10<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10>(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10)", "scala.Tuple10"))
-data class Arity10<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10>(val _1: T1, val _2: T2, val _3: T3, val _4: T4, val _5: T5, val _6: T6, val _7: T7, val _8: T8, val _9: T9, val _10: T10): Serializable
-
-@Deprecated("Use Scala tuples instead.", ReplaceWith("Tuple11<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11>(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11)", "scala.Tuple11"))
-data class Arity11<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11>(val _1: T1, val _2: T2, val _3: T3, val _4: T4, val _5: T5, val _6: T6, val _7: T7, val _8: T8, val _9: T9, val _10: T10, val _11: T11): Serializable
-
-@Deprecated("Use Scala tuples instead.", ReplaceWith("Tuple12<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12>(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12)", "scala.Tuple12"))
-data class Arity12<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12>(val _1: T1, val _2: T2, val _3: T3, val _4: T4, val _5: T5, val _6: T6, val _7: T7, val _8: T8, val _9: T9, val _10: T10, val _11: T11, val _12: T12): Serializable
-
-@Deprecated("Use Scala tuples instead.", ReplaceWith("Tuple13<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13>(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13)", "scala.Tuple13"))
-data class Arity13<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13>(val _1: T1, val _2: T2, val _3: T3, val _4: T4, val _5: T5, val _6: T6, val _7: T7, val _8: T8, val _9: T9, val _10: T10, val _11: T11, val _12: T12, val _13: T13): Serializable
-
-@Deprecated("Use Scala tuples instead.", ReplaceWith("Tuple14<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14>(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14)", "scala.Tuple14"))
-data class Arity14<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14>(val _1: T1, val _2: T2, val _3: T3, val _4: T4, val _5: T5, val _6: T6, val _7: T7, val _8: T8, val _9: T9, val _10: T10, val _11: T11, val _12: T12, val _13: T13, val _14: T14): Serializable
-
-@Deprecated("Use Scala tuples instead.", ReplaceWith("Tuple15<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15>(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15)", "scala.Tuple15"))
-data class Arity15<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15>(val _1: T1, val _2: T2, val _3: T3, val _4: T4, val _5: T5, val _6: T6, val _7: T7, val _8: T8, val _9: T9, val _10: T10, val _11: T11, val _12: T12, val _13: T13, val _14: T14, val _15: T15): Serializable
-
-@Deprecated("Use Scala tuples instead.", ReplaceWith("Tuple16<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16>(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16)", "scala.Tuple16"))
-data class Arity16<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16>(val _1: T1, val _2: T2, val _3: T3, val _4: T4, val _5: T5, val _6: T6, val _7: T7, val _8: T8, val _9: T9, val _10: T10, val _11: T11, val _12: T12, val _13: T13, val _14: T14, val _15: T15, val _16: T16): Serializable
-
-@Deprecated("Use Scala tuples instead.", ReplaceWith("Tuple17<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17>(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, _17)", "scala.Tuple17"))
-data class Arity17<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17>(val _1: T1, val _2: T2, val _3: T3, val _4: T4, val _5: T5, val _6: T6, val _7: T7, val _8: T8, val _9: T9, val _10: T10, val _11: T11, val _12: T12, val _13: T13, val _14: T14, val _15: T15, val _16: T16, val _17: T17): Serializable
-
-@Deprecated("Use Scala tuples instead.", ReplaceWith("Tuple18<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18>(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, _17, _18)", "scala.Tuple18"))
-data class Arity18<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18>(val _1: T1, val _2: T2, val _3: T3, val _4: T4, val _5: T5, val _6: T6, val _7: T7, val _8: T8, val _9: T9, val _10: T10, val _11: T11, val _12: T12, val _13: T13, val _14: T14, val _15: T15, val _16: T16, val _17: T17, val _18: T18): Serializable
-
-@Deprecated("Use Scala tuples instead.", ReplaceWith("Tuple19<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19>(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, _17, _18, _19)", "scala.Tuple19"))
-data class Arity19<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19>(val _1: T1, val _2: T2, val _3: T3, val _4: T4, val _5: T5, val _6: T6, val _7: T7, val _8: T8, val _9: T9, val _10: T10, val _11: T11, val _12: T12, val _13: T13, val _14: T14, val _15: T15, val _16: T16, val _17: T17, val _18: T18, val _19: T19): Serializable
-
-@Deprecated("Use Scala tuples instead.", ReplaceWith("Tuple20<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20>(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, _17, _18, _19, _20)", "scala.Tuple20"))
-data class Arity20<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20>(val _1: T1, val _2: T2, val _3: T3, val _4: T4, val _5: T5, val _6: T6, val _7: T7, val _8: T8, val _9: T9, val _10: T10, val _11: T11, val _12: T12, val _13: T13, val _14: T14, val _15: T15, val _16: T16, val _17: T17, val _18: T18, val _19: T19, val _20: T20): Serializable
-
-@Deprecated("Use Scala tuples instead.", ReplaceWith("Tuple21<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21>(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, _17, _18, _19, _20, _21)", "scala.Tuple21"))
-data class Arity21<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21>(val _1: T1, val _2: T2, val _3: T3, val _4: T4, val _5: T5, val _6: T6, val _7: T7, val _8: T8, val _9: T9, val _10: T10, val _11: T11, val _12: T12, val _13: T13, val _14: T14, val _15: T15, val _16: T16, val _17: T17, val _18: T18, val _19: T19, val _20: T20, val _21: T21): Serializable
-
-@Deprecated("Use Scala tuples instead.", ReplaceWith("Tuple22<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22>(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, _17, _18, _19, _20, _21, _22)", "scala.Tuple22"))
-data class Arity22<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22>(val _1: T1, val _2: T2, val _3: T3, val _4: T4, val _5: T5, val _6: T6, val _7: T7, val _8: T8, val _9: T9, val _10: T10, val _11: T11, val _12: T12, val _13: T13, val _14: T14, val _15: T15, val _16: T16, val _17: T17, val _18: T18, val _19: T19, val _20: T20, val _21: T21, val _22: T22): Serializable
-
-@Deprecated("Use Scala tuples instead. They only reach 22 values.")
-data class Arity23<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23>(val _1: T1, val _2: T2, val _3: T3, val _4: T4, val _5: T5, val _6: T6, val _7: T7, val _8: T8, val _9: T9, val _10: T10, val _11: T11, val _12: T12, val _13: T13, val _14: T14, val _15: T15, val _16: T16, val _17: T17, val _18: T18, val _19: T19, val _20: T20, val _21: T21, val _22: T22, val _23: T23): Serializable
-
-@Deprecated("Use Scala tuples instead. They only reach 22 values.")
-data class Arity24<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24>(val _1: T1, val _2: T2, val _3: T3, val _4: T4, val _5: T5, val _6: T6, val _7: T7, val _8: T8, val _9: T9, val _10: T10, val _11: T11, val _12: T12, val _13: T13, val _14: T14, val _15: T15, val _16: T16, val _17: T17, val _18: T18, val _19: T19, val _20: T20, val _21: T21, val _22: T22, val _23: T23, val _24: T24): Serializable
-
-@Deprecated("Use Scala tuples instead. They only reach 22 values.")
-data class Arity25<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25>(val _1: T1, val _2: T2, val _3: T3, val _4: T4, val _5: T5, val _6: T6, val _7: T7, val _8: T8, val _9: T9, val _10: T10, val _11: T11, val _12: T12, val _13: T13, val _14: T14, val _15: T15, val _16: T16, val _17: T17, val _18: T18, val _19: T19, val _20: T20, val _21: T21, val _22: T22, val _23: T23, val _24: T24, val _25: T25): Serializable
-
-@Deprecated("Use Scala tuples instead. They only reach 22 values.")
-data class Arity26<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26>(val _1: T1, val _2: T2, val _3: T3, val _4: T4, val _5: T5, val _6: T6, val _7: T7, val _8: T8, val _9: T9, val _10: T10, val _11: T11, val _12: T12, val _13: T13, val _14: T14, val _15: T15, val _16: T16, val _17: T17, val _18: T18, val _19: T19, val _20: T20, val _21: T21, val _22: T22, val _23: T23, val _24: T24, val _25: T25, val _26: T26): Serializable
-
-
-@Deprecated("Use Scala tuples instead.", ReplaceWith("t(_1)"))
-fun <T1> c(_1: T1): Arity1<T1> = Arity1<T1>(_1)
-
-@Deprecated("Use Scala tuples instead.", ReplaceWith("t(_1, _2)"))
-fun <T1, T2> c(_1: T1, _2: T2): Arity2<T1, T2> = Arity2<T1, T2>(_1, _2)
-
-@Deprecated("Use Scala tuples instead.", ReplaceWith("t(_1, _2, _3)"))
-fun <T1, T2, T3> c(_1: T1, _2: T2, _3: T3): Arity3<T1, T2, T3> = Arity3<T1, T2, T3>(_1, _2, _3)
-
-@Deprecated("Use Scala tuples instead.", ReplaceWith("t(_1, _2, _3, _4)"))
-fun <T1, T2, T3, T4> c(_1: T1, _2: T2, _3: T3, _4: T4): Arity4<T1, T2, T3, T4> = Arity4<T1, T2, T3, T4>(_1, _2, _3, _4)
-
-@Deprecated("Use Scala tuples instead.", ReplaceWith("t(_1, _2, _3, _4, _5)"))
-fun <T1, T2, T3, T4, T5> c(_1: T1, _2: T2, _3: T3, _4: T4, _5: T5): Arity5<T1, T2, T3, T4, T5> = Arity5<T1, T2, T3, T4, T5>(_1, _2, _3, _4, _5)
-
-@Deprecated("Use Scala tuples instead.", ReplaceWith("t(_1, _2, _3, _4, _5, _6)"))
-fun <T1, T2, T3, T4, T5, T6> c(_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6): Arity6<T1, T2, T3, T4, T5, T6> = Arity6<T1, T2, T3, T4, T5, T6>(_1, _2, _3, _4, _5, _6)
-
-@Deprecated("Use Scala tuples instead.", ReplaceWith("t(_1, _2, _3, _4, _5, _6, _7)"))
-fun <T1, T2, T3, T4, T5, T6, T7> c(_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7): Arity7<T1, T2, T3, T4, T5, T6, T7> = Arity7<T1, T2, T3, T4, T5, T6, T7>(_1, _2, _3, _4, _5, _6, _7)
-
-@Deprecated("Use Scala tuples instead.", ReplaceWith("t(_1, _2, _3, _4, _5, _6, _7, _8)"))
-fun <T1, T2, T3, T4, T5, T6, T7, T8> c(_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8): Arity8<T1, T2, T3, T4, T5, T6, T7, T8> = Arity8<T1, T2, T3, T4, T5, T6, T7, T8>(_1, _2, _3, _4, _5, _6, _7, _8)
-
-@Deprecated("Use Scala tuples instead.", ReplaceWith("t(_1, _2, _3, _4, _5, _6, _7, _8, _9)"))
-fun <T1, T2, T3, T4, T5, T6, T7, T8, T9> c(_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9): Arity9<T1, T2, T3, T4, T5, T6, T7, T8, T9> = Arity9<T1, T2, T3, T4, T5, T6, T7, T8, T9>(_1, _2, _3, _4, _5, _6, _7, _8, _9)
-
-@Deprecated("Use Scala tuples instead.", ReplaceWith("t(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10)"))
-fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10> c(_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10): Arity10<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10> = Arity10<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10>(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10)
-
-@Deprecated("Use Scala tuples instead.", ReplaceWith("t(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11)"))
-fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11> c(_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11): Arity11<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11> = Arity11<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11>(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11)
-
-@Deprecated("Use Scala tuples instead.", ReplaceWith("t(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12)"))
-fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12> c(_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12): Arity12<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12> = Arity12<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12>(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12)
-
-@Deprecated("Use Scala tuples instead.", ReplaceWith("t(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13)"))
-fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13> c(_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13): Arity13<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13> = Arity13<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13>(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13)
-
-@Deprecated("Use Scala tuples instead.", ReplaceWith("t(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14)"))
-fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14> c(_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13, _14: T14): Arity14<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14> = Arity14<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14>(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14)
-
-@Deprecated("Use Scala tuples instead.", ReplaceWith("t(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15)"))
-fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15> c(_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13, _14: T14, _15: T15): Arity15<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15> = Arity15<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15>(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15)
-
-@Deprecated("Use Scala tuples instead.", ReplaceWith("t(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16)"))
-fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16> c(_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13, _14: T14, _15: T15, _16: T16): Arity16<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16> = Arity16<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16>(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16)
-
-@Deprecated("Use Scala tuples instead.", ReplaceWith("t(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, _17)"))
-fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17> c(_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13, _14: T14, _15: T15, _16: T16, _17: T17): Arity17<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17> = Arity17<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17>(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, _17)
-
-@Deprecated("Use Scala tuples instead.", ReplaceWith("t(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, _17, _18)"))
-fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18> c(_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13, _14: T14, _15: T15, _16: T16, _17: T17, _18: T18): Arity18<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18> = Arity18<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18>(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, _17, _18)
-
-@Deprecated("Use Scala tuples instead.", ReplaceWith("t(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, _17, _18, _19)"))
-fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19> c(_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13, _14: T14, _15: T15, _16: T16, _17: T17, _18: T18, _19: T19): Arity19<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19> = Arity19<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19>(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, _17, _18, _19)
-
-@Deprecated("Use Scala tuples instead.", ReplaceWith("t(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, _17, _18, _19, _20)"))
-fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20> c(_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13, _14: T14, _15: T15, _16: T16, _17: T17, _18: T18, _19: T19, _20: T20): Arity20<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20> = Arity20<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20>(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, _17, _18, _19, _20)
-
-@Deprecated("Use Scala tuples instead.", ReplaceWith("t(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, _17, _18, _19, _20, _21)"))
-fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21> c(_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13, _14: T14, _15: T15, _16: T16, _17: T17, _18: T18, _19: T19, _20: T20, _21: T21): Arity21<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21> = Arity21<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21>(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, _17, _18, _19, _20, _21)
-
-@Deprecated("Use Scala tuples instead.", ReplaceWith("t(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, _17, _18, _19, _20, _21, _22)"))
-fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22> c(_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13, _14: T14, _15: T15, _16: T16, _17: T17, _18: T18, _19: T19, _20: T20, _21: T21, _22: T22): Arity22<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22> = Arity22<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22>(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, _17, _18, _19, _20, _21, _22)
-
-@Deprecated("Use Scala tuples instead. They only reach 22 values.")
-fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23> c(_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13, _14: T14, _15: T15, _16: T16, _17: T17, _18: T18, _19: T19, _20: T20, _21: T21, _22: T22, _23: T23): Arity23<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23> = Arity23<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23>(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, _17, _18, _19, _20, _21, _22, _23)
-
-@Deprecated("Use Scala tuples instead. They only reach 22 values.")
-fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24> c(_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13, _14: T14, _15: T15, _16: T16, _17: T17, _18: T18, _19: T19, _20: T20, _21: T21, _22: T22, _23: T23, _24: T24): Arity24<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24> = Arity24<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24>(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, _17, _18, _19, _20, _21, _22, _23, _24)
-
-@Deprecated("Use Scala tuples instead. They only reach 22 values.")
-fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25> c(_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13, _14: T14, _15: T15, _16: T16, _17: T17, _18: T18, _19: T19, _20: T20, _21: T21, _22: T22, _23: T23, _24: T24, _25: T25): Arity25<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25> = Arity25<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25>(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, _17, _18, _19, _20, _21, _22, _23, _24, _25)
-
-@Deprecated("Use Scala tuples instead. They only reach 22 values.")
-fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26> c(_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13, _14: T14, _15: T15, _16: T16, _17: T17, _18: T18, _19: T19, _20: T20, _21: T21, _22: T22, _23: T23, _24: T24, _25: T25, _26: T26): Arity26<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26> = Arity26<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26>(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, _17, _18, _19, _20, _21, _22, _23, _24, _25, _26)
-
-
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2> Arity1<T1>.plus(that: Arity1<T2>) = Arity2(this._1, that._1)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3> Arity1<T1>.plus(that: Arity2<T2, T3>) = Arity3(this._1, that._1, that._2)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3> Arity2<T1, T2>.plus(that: Arity1<T3>) = Arity3(this._1, this._2, that._1)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4> Arity1<T1>.plus(that: Arity3<T2, T3, T4>) = Arity4(this._1, that._1, that._2, that._3)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4> Arity2<T1, T2>.plus(that: Arity2<T3, T4>) = Arity4(this._1, this._2, that._1, that._2)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4> Arity3<T1, T2, T3>.plus(that: Arity1<T4>) = Arity4(this._1, this._2, this._3, that._1)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5> Arity1<T1>.plus(that: Arity4<T2, T3, T4, T5>) = Arity5(this._1, that._1, that._2, that._3, that._4)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5> Arity2<T1, T2>.plus(that: Arity3<T3, T4, T5>) = Arity5(this._1, this._2, that._1, that._2, that._3)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5> Arity3<T1, T2, T3>.plus(that: Arity2<T4, T5>) = Arity5(this._1, this._2, this._3, that._1, that._2)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5> Arity4<T1, T2, T3, T4>.plus(that: Arity1<T5>) = Arity5(this._1, this._2, this._3, this._4, that._1)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6> Arity1<T1>.plus(that: Arity5<T2, T3, T4, T5, T6>) = Arity6(this._1, that._1, that._2, that._3, that._4, that._5)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6> Arity2<T1, T2>.plus(that: Arity4<T3, T4, T5, T6>) = Arity6(this._1, this._2, that._1, that._2, that._3, that._4)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6> Arity3<T1, T2, T3>.plus(that: Arity3<T4, T5, T6>) = Arity6(this._1, this._2, this._3, that._1, that._2, that._3)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6> Arity4<T1, T2, T3, T4>.plus(that: Arity2<T5, T6>) = Arity6(this._1, this._2, this._3, this._4, that._1, that._2)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6> Arity5<T1, T2, T3, T4, T5>.plus(that: Arity1<T6>) = Arity6(this._1, this._2, this._3, this._4, this._5, that._1)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7> Arity1<T1>.plus(that: Arity6<T2, T3, T4, T5, T6, T7>) = Arity7(this._1, that._1, that._2, that._3, that._4, that._5, that._6)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7> Arity2<T1, T2>.plus(that: Arity5<T3, T4, T5, T6, T7>) = Arity7(this._1, this._2, that._1, that._2, that._3, that._4, that._5)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7> Arity3<T1, T2, T3>.plus(that: Arity4<T4, T5, T6, T7>) = Arity7(this._1, this._2, this._3, that._1, that._2, that._3, that._4)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7> Arity4<T1, T2, T3, T4>.plus(that: Arity3<T5, T6, T7>) = Arity7(this._1, this._2, this._3, this._4, that._1, that._2, that._3)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7> Arity5<T1, T2, T3, T4, T5>.plus(that: Arity2<T6, T7>) = Arity7(this._1, this._2, this._3, this._4, this._5, that._1, that._2)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7> Arity6<T1, T2, T3, T4, T5, T6>.plus(that: Arity1<T7>) = Arity7(this._1, this._2, this._3, this._4, this._5, this._6, that._1)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8> Arity1<T1>.plus(that: Arity7<T2, T3, T4, T5, T6, T7, T8>) = Arity8(this._1, that._1, that._2, that._3, that._4, that._5, that._6, that._7)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8> Arity2<T1, T2>.plus(that: Arity6<T3, T4, T5, T6, T7, T8>) = Arity8(this._1, this._2, that._1, that._2, that._3, that._4, that._5, that._6)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8> Arity3<T1, T2, T3>.plus(that: Arity5<T4, T5, T6, T7, T8>) = Arity8(this._1, this._2, this._3, that._1, that._2, that._3, that._4, that._5)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8> Arity4<T1, T2, T3, T4>.plus(that: Arity4<T5, T6, T7, T8>) = Arity8(this._1, this._2, this._3, this._4, that._1, that._2, that._3, that._4)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8> Arity5<T1, T2, T3, T4, T5>.plus(that: Arity3<T6, T7, T8>) = Arity8(this._1, this._2, this._3, this._4, this._5, that._1, that._2, that._3)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8> Arity6<T1, T2, T3, T4, T5, T6>.plus(that: Arity2<T7, T8>) = Arity8(this._1, this._2, this._3, this._4, this._5, this._6, that._1, that._2)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8> Arity7<T1, T2, T3, T4, T5, T6, T7>.plus(that: Arity1<T8>) = Arity8(this._1, this._2, this._3, this._4, this._5, this._6, this._7, that._1)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9> Arity1<T1>.plus(that: Arity8<T2, T3, T4, T5, T6, T7, T8, T9>) = Arity9(this._1, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9> Arity2<T1, T2>.plus(that: Arity7<T3, T4, T5, T6, T7, T8, T9>) = Arity9(this._1, this._2, that._1, that._2, that._3, that._4, that._5, that._6, that._7)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9> Arity3<T1, T2, T3>.plus(that: Arity6<T4, T5, T6, T7, T8, T9>) = Arity9(this._1, this._2, this._3, that._1, that._2, that._3, that._4, that._5, that._6)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9> Arity4<T1, T2, T3, T4>.plus(that: Arity5<T5, T6, T7, T8, T9>) = Arity9(this._1, this._2, this._3, this._4, that._1, that._2, that._3, that._4, that._5)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9> Arity5<T1, T2, T3, T4, T5>.plus(that: Arity4<T6, T7, T8, T9>) = Arity9(this._1, this._2, this._3, this._4, this._5, that._1, that._2, that._3, that._4)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9> Arity6<T1, T2, T3, T4, T5, T6>.plus(that: Arity3<T7, T8, T9>) = Arity9(this._1, this._2, this._3, this._4, this._5, this._6, that._1, that._2, that._3)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9> Arity7<T1, T2, T3, T4, T5, T6, T7>.plus(that: Arity2<T8, T9>) = Arity9(this._1, this._2, this._3, this._4, this._5, this._6, this._7, that._1, that._2)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9> Arity8<T1, T2, T3, T4, T5, T6, T7, T8>.plus(that: Arity1<T9>) = Arity9(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, that._1)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10> Arity1<T1>.plus(that: Arity9<T2, T3, T4, T5, T6, T7, T8, T9, T10>) = Arity10(this._1, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10> Arity2<T1, T2>.plus(that: Arity8<T3, T4, T5, T6, T7, T8, T9, T10>) = Arity10(this._1, this._2, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10> Arity3<T1, T2, T3>.plus(that: Arity7<T4, T5, T6, T7, T8, T9, T10>) = Arity10(this._1, this._2, this._3, that._1, that._2, that._3, that._4, that._5, that._6, that._7)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10> Arity4<T1, T2, T3, T4>.plus(that: Arity6<T5, T6, T7, T8, T9, T10>) = Arity10(this._1, this._2, this._3, this._4, that._1, that._2, that._3, that._4, that._5, that._6)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10> Arity5<T1, T2, T3, T4, T5>.plus(that: Arity5<T6, T7, T8, T9, T10>) = Arity10(this._1, this._2, this._3, this._4, this._5, that._1, that._2, that._3, that._4, that._5)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10> Arity6<T1, T2, T3, T4, T5, T6>.plus(that: Arity4<T7, T8, T9, T10>) = Arity10(this._1, this._2, this._3, this._4, this._5, this._6, that._1, that._2, that._3, that._4)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10> Arity7<T1, T2, T3, T4, T5, T6, T7>.plus(that: Arity3<T8, T9, T10>) = Arity10(this._1, this._2, this._3, this._4, this._5, this._6, this._7, that._1, that._2, that._3)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10> Arity8<T1, T2, T3, T4, T5, T6, T7, T8>.plus(that: Arity2<T9, T10>) = Arity10(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, that._1, that._2)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10> Arity9<T1, T2, T3, T4, T5, T6, T7, T8, T9>.plus(that: Arity1<T10>) = Arity10(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, that._1)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11> Arity1<T1>.plus(that: Arity10<T2, T3, T4, T5, T6, T7, T8, T9, T10, T11>) = Arity11(this._1, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11> Arity2<T1, T2>.plus(that: Arity9<T3, T4, T5, T6, T7, T8, T9, T10, T11>) = Arity11(this._1, this._2, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11> Arity3<T1, T2, T3>.plus(that: Arity8<T4, T5, T6, T7, T8, T9, T10, T11>) = Arity11(this._1, this._2, this._3, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11> Arity4<T1, T2, T3, T4>.plus(that: Arity7<T5, T6, T7, T8, T9, T10, T11>) = Arity11(this._1, this._2, this._3, this._4, that._1, that._2, that._3, that._4, that._5, that._6, that._7)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11> Arity5<T1, T2, T3, T4, T5>.plus(that: Arity6<T6, T7, T8, T9, T10, T11>) = Arity11(this._1, this._2, this._3, this._4, this._5, that._1, that._2, that._3, that._4, that._5, that._6)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11> Arity6<T1, T2, T3, T4, T5, T6>.plus(that: Arity5<T7, T8, T9, T10, T11>) = Arity11(this._1, this._2, this._3, this._4, this._5, this._6, that._1, that._2, that._3, that._4, that._5)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11> Arity7<T1, T2, T3, T4, T5, T6, T7>.plus(that: Arity4<T8, T9, T10, T11>) = Arity11(this._1, this._2, this._3, this._4, this._5, this._6, this._7, that._1, that._2, that._3, that._4)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11> Arity8<T1, T2, T3, T4, T5, T6, T7, T8>.plus(that: Arity3<T9, T10, T11>) = Arity11(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, that._1, that._2, that._3)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11> Arity9<T1, T2, T3, T4, T5, T6, T7, T8, T9>.plus(that: Arity2<T10, T11>) = Arity11(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, that._1, that._2)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11> Arity10<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10>.plus(that: Arity1<T11>) = Arity11(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, that._1)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12> Arity1<T1>.plus(that: Arity11<T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12>) = Arity12(this._1, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12> Arity2<T1, T2>.plus(that: Arity10<T3, T4, T5, T6, T7, T8, T9, T10, T11, T12>) = Arity12(this._1, this._2, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12> Arity3<T1, T2, T3>.plus(that: Arity9<T4, T5, T6, T7, T8, T9, T10, T11, T12>) = Arity12(this._1, this._2, this._3, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12> Arity4<T1, T2, T3, T4>.plus(that: Arity8<T5, T6, T7, T8, T9, T10, T11, T12>) = Arity12(this._1, this._2, this._3, this._4, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12> Arity5<T1, T2, T3, T4, T5>.plus(that: Arity7<T6, T7, T8, T9, T10, T11, T12>) = Arity12(this._1, this._2, this._3, this._4, this._5, that._1, that._2, that._3, that._4, that._5, that._6, that._7)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12> Arity6<T1, T2, T3, T4, T5, T6>.plus(that: Arity6<T7, T8, T9, T10, T11, T12>) = Arity12(this._1, this._2, this._3, this._4, this._5, this._6, that._1, that._2, that._3, that._4, that._5, that._6)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12> Arity7<T1, T2, T3, T4, T5, T6, T7>.plus(that: Arity5<T8, T9, T10, T11, T12>) = Arity12(this._1, this._2, this._3, this._4, this._5, this._6, this._7, that._1, that._2, that._3, that._4, that._5)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12> Arity8<T1, T2, T3, T4, T5, T6, T7, T8>.plus(that: Arity4<T9, T10, T11, T12>) = Arity12(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, that._1, that._2, that._3, that._4)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12> Arity9<T1, T2, T3, T4, T5, T6, T7, T8, T9>.plus(that: Arity3<T10, T11, T12>) = Arity12(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, that._1, that._2, that._3)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12> Arity10<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10>.plus(that: Arity2<T11, T12>) = Arity12(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, that._1, that._2)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12> Arity11<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11>.plus(that: Arity1<T12>) = Arity12(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, that._1)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13> Arity1<T1>.plus(that: Arity12<T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13>) = Arity13(this._1, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13> Arity2<T1, T2>.plus(that: Arity11<T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13>) = Arity13(this._1, this._2, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13> Arity3<T1, T2, T3>.plus(that: Arity10<T4, T5, T6, T7, T8, T9, T10, T11, T12, T13>) = Arity13(this._1, this._2, this._3, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13> Arity4<T1, T2, T3, T4>.plus(that: Arity9<T5, T6, T7, T8, T9, T10, T11, T12, T13>) = Arity13(this._1, this._2, this._3, this._4, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13> Arity5<T1, T2, T3, T4, T5>.plus(that: Arity8<T6, T7, T8, T9, T10, T11, T12, T13>) = Arity13(this._1, this._2, this._3, this._4, this._5, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13> Arity6<T1, T2, T3, T4, T5, T6>.plus(that: Arity7<T7, T8, T9, T10, T11, T12, T13>) = Arity13(this._1, this._2, this._3, this._4, this._5, this._6, that._1, that._2, that._3, that._4, that._5, that._6, that._7)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13> Arity7<T1, T2, T3, T4, T5, T6, T7>.plus(that: Arity6<T8, T9, T10, T11, T12, T13>) = Arity13(this._1, this._2, this._3, this._4, this._5, this._6, this._7, that._1, that._2, that._3, that._4, that._5, that._6)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13> Arity8<T1, T2, T3, T4, T5, T6, T7, T8>.plus(that: Arity5<T9, T10, T11, T12, T13>) = Arity13(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, that._1, that._2, that._3, that._4, that._5)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13> Arity9<T1, T2, T3, T4, T5, T6, T7, T8, T9>.plus(that: Arity4<T10, T11, T12, T13>) = Arity13(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, that._1, that._2, that._3, that._4)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13> Arity10<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10>.plus(that: Arity3<T11, T12, T13>) = Arity13(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, that._1, that._2, that._3)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13> Arity11<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11>.plus(that: Arity2<T12, T13>) = Arity13(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, that._1, that._2)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13> Arity12<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12>.plus(that: Arity1<T13>) = Arity13(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, that._1)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14> Arity1<T1>.plus(that: Arity13<T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14>) = Arity14(this._1, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14> Arity2<T1, T2>.plus(that: Arity12<T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14>) = Arity14(this._1, this._2, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14> Arity3<T1, T2, T3>.plus(that: Arity11<T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14>) = Arity14(this._1, this._2, this._3, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14> Arity4<T1, T2, T3, T4>.plus(that: Arity10<T5, T6, T7, T8, T9, T10, T11, T12, T13, T14>) = Arity14(this._1, this._2, this._3, this._4, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14> Arity5<T1, T2, T3, T4, T5>.plus(that: Arity9<T6, T7, T8, T9, T10, T11, T12, T13, T14>) = Arity14(this._1, this._2, this._3, this._4, this._5, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14> Arity6<T1, T2, T3, T4, T5, T6>.plus(that: Arity8<T7, T8, T9, T10, T11, T12, T13, T14>) = Arity14(this._1, this._2, this._3, this._4, this._5, this._6, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14> Arity7<T1, T2, T3, T4, T5, T6, T7>.plus(that: Arity7<T8, T9, T10, T11, T12, T13, T14>) = Arity14(this._1, this._2, this._3, this._4, this._5, this._6, this._7, that._1, that._2, that._3, that._4, that._5, that._6, that._7)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14> Arity8<T1, T2, T3, T4, T5, T6, T7, T8>.plus(that: Arity6<T9, T10, T11, T12, T13, T14>) = Arity14(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, that._1, that._2, that._3, that._4, that._5, that._6)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14> Arity9<T1, T2, T3, T4, T5, T6, T7, T8, T9>.plus(that: Arity5<T10, T11, T12, T13, T14>) = Arity14(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, that._1, that._2, that._3, that._4, that._5)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14> Arity10<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10>.plus(that: Arity4<T11, T12, T13, T14>) = Arity14(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, that._1, that._2, that._3, that._4)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14> Arity11<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11>.plus(that: Arity3<T12, T13, T14>) = Arity14(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, that._1, that._2, that._3)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14> Arity12<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12>.plus(that: Arity2<T13, T14>) = Arity14(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, that._1, that._2)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14> Arity13<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13>.plus(that: Arity1<T14>) = Arity14(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, that._1)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15> Arity1<T1>.plus(that: Arity14<T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15>) = Arity15(this._1, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13, that._14)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15> Arity2<T1, T2>.plus(that: Arity13<T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15>) = Arity15(this._1, this._2, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15> Arity3<T1, T2, T3>.plus(that: Arity12<T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15>) = Arity15(this._1, this._2, this._3, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15> Arity4<T1, T2, T3, T4>.plus(that: Arity11<T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15>) = Arity15(this._1, this._2, this._3, this._4, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15> Arity5<T1, T2, T3, T4, T5>.plus(that: Arity10<T6, T7, T8, T9, T10, T11, T12, T13, T14, T15>) = Arity15(this._1, this._2, this._3, this._4, this._5, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15> Arity6<T1, T2, T3, T4, T5, T6>.plus(that: Arity9<T7, T8, T9, T10, T11, T12, T13, T14, T15>) = Arity15(this._1, this._2, this._3, this._4, this._5, this._6, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15> Arity7<T1, T2, T3, T4, T5, T6, T7>.plus(that: Arity8<T8, T9, T10, T11, T12, T13, T14, T15>) = Arity15(this._1, this._2, this._3, this._4, this._5, this._6, this._7, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15> Arity8<T1, T2, T3, T4, T5, T6, T7, T8>.plus(that: Arity7<T9, T10, T11, T12, T13, T14, T15>) = Arity15(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, that._1, that._2, that._3, that._4, that._5, that._6, that._7)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15> Arity9<T1, T2, T3, T4, T5, T6, T7, T8, T9>.plus(that: Arity6<T10, T11, T12, T13, T14, T15>) = Arity15(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, that._1, that._2, that._3, that._4, that._5, that._6)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15> Arity10<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10>.plus(that: Arity5<T11, T12, T13, T14, T15>) = Arity15(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, that._1, that._2, that._3, that._4, that._5)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15> Arity11<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11>.plus(that: Arity4<T12, T13, T14, T15>) = Arity15(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, that._1, that._2, that._3, that._4)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15> Arity12<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12>.plus(that: Arity3<T13, T14, T15>) = Arity15(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, that._1, that._2, that._3)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15> Arity13<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13>.plus(that: Arity2<T14, T15>) = Arity15(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, that._1, that._2)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15> Arity14<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14>.plus(that: Arity1<T15>) = Arity15(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, that._1)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16> Arity1<T1>.plus(that: Arity15<T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16>) = Arity16(this._1, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13, that._14, that._15)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16> Arity2<T1, T2>.plus(that: Arity14<T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16>) = Arity16(this._1, this._2, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13, that._14)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16> Arity3<T1, T2, T3>.plus(that: Arity13<T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16>) = Arity16(this._1, this._2, this._3, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16> Arity4<T1, T2, T3, T4>.plus(that: Arity12<T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16>) = Arity16(this._1, this._2, this._3, this._4, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16> Arity5<T1, T2, T3, T4, T5>.plus(that: Arity11<T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16>) = Arity16(this._1, this._2, this._3, this._4, this._5, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16> Arity6<T1, T2, T3, T4, T5, T6>.plus(that: Arity10<T7, T8, T9, T10, T11, T12, T13, T14, T15, T16>) = Arity16(this._1, this._2, this._3, this._4, this._5, this._6, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16> Arity7<T1, T2, T3, T4, T5, T6, T7>.plus(that: Arity9<T8, T9, T10, T11, T12, T13, T14, T15, T16>) = Arity16(this._1, this._2, this._3, this._4, this._5, this._6, this._7, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16> Arity8<T1, T2, T3, T4, T5, T6, T7, T8>.plus(that: Arity8<T9, T10, T11, T12, T13, T14, T15, T16>) = Arity16(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16> Arity9<T1, T2, T3, T4, T5, T6, T7, T8, T9>.plus(that: Arity7<T10, T11, T12, T13, T14, T15, T16>) = Arity16(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, that._1, that._2, that._3, that._4, that._5, that._6, that._7)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16> Arity10<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10>.plus(that: Arity6<T11, T12, T13, T14, T15, T16>) = Arity16(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, that._1, that._2, that._3, that._4, that._5, that._6)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16> Arity11<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11>.plus(that: Arity5<T12, T13, T14, T15, T16>) = Arity16(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, that._1, that._2, that._3, that._4, that._5)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16> Arity12<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12>.plus(that: Arity4<T13, T14, T15, T16>) = Arity16(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, that._1, that._2, that._3, that._4)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16> Arity13<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13>.plus(that: Arity3<T14, T15, T16>) = Arity16(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, that._1, that._2, that._3)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16> Arity14<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14>.plus(that: Arity2<T15, T16>) = Arity16(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, that._1, that._2)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16> Arity15<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15>.plus(that: Arity1<T16>) = Arity16(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, this._15, that._1)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17> Arity1<T1>.plus(that: Arity16<T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17>) = Arity17(this._1, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13, that._14, that._15, that._16)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17> Arity2<T1, T2>.plus(that: Arity15<T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17>) = Arity17(this._1, this._2, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13, that._14, that._15)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17> Arity3<T1, T2, T3>.plus(that: Arity14<T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17>) = Arity17(this._1, this._2, this._3, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13, that._14)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17> Arity4<T1, T2, T3, T4>.plus(that: Arity13<T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17>) = Arity17(this._1, this._2, this._3, this._4, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17> Arity5<T1, T2, T3, T4, T5>.plus(that: Arity12<T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17>) = Arity17(this._1, this._2, this._3, this._4, this._5, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17> Arity6<T1, T2, T3, T4, T5, T6>.plus(that: Arity11<T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17>) = Arity17(this._1, this._2, this._3, this._4, this._5, this._6, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17> Arity7<T1, T2, T3, T4, T5, T6, T7>.plus(that: Arity10<T8, T9, T10, T11, T12, T13, T14, T15, T16, T17>) = Arity17(this._1, this._2, this._3, this._4, this._5, this._6, this._7, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17> Arity8<T1, T2, T3, T4, T5, T6, T7, T8>.plus(that: Arity9<T9, T10, T11, T12, T13, T14, T15, T16, T17>) = Arity17(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17> Arity9<T1, T2, T3, T4, T5, T6, T7, T8, T9>.plus(that: Arity8<T10, T11, T12, T13, T14, T15, T16, T17>) = Arity17(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17> Arity10<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10>.plus(that: Arity7<T11, T12, T13, T14, T15, T16, T17>) = Arity17(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, that._1, that._2, that._3, that._4, that._5, that._6, that._7)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17> Arity11<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11>.plus(that: Arity6<T12, T13, T14, T15, T16, T17>) = Arity17(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, that._1, that._2, that._3, that._4, that._5, that._6)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17> Arity12<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12>.plus(that: Arity5<T13, T14, T15, T16, T17>) = Arity17(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, that._1, that._2, that._3, that._4, that._5)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17> Arity13<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13>.plus(that: Arity4<T14, T15, T16, T17>) = Arity17(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, that._1, that._2, that._3, that._4)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17> Arity14<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14>.plus(that: Arity3<T15, T16, T17>) = Arity17(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, that._1, that._2, that._3)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17> Arity15<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15>.plus(that: Arity2<T16, T17>) = Arity17(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, this._15, that._1, that._2)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17> Arity16<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16>.plus(that: Arity1<T17>) = Arity17(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, this._15, this._16, that._1)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18> Arity1<T1>.plus(that: Arity17<T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18>) = Arity18(this._1, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13, that._14, that._15, that._16, that._17)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18> Arity2<T1, T2>.plus(that: Arity16<T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18>) = Arity18(this._1, this._2, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13, that._14, that._15, that._16)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18> Arity3<T1, T2, T3>.plus(that: Arity15<T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18>) = Arity18(this._1, this._2, this._3, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13, that._14, that._15)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18> Arity4<T1, T2, T3, T4>.plus(that: Arity14<T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18>) = Arity18(this._1, this._2, this._3, this._4, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13, that._14)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18> Arity5<T1, T2, T3, T4, T5>.plus(that: Arity13<T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18>) = Arity18(this._1, this._2, this._3, this._4, this._5, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18> Arity6<T1, T2, T3, T4, T5, T6>.plus(that: Arity12<T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18>) = Arity18(this._1, this._2, this._3, this._4, this._5, this._6, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18> Arity7<T1, T2, T3, T4, T5, T6, T7>.plus(that: Arity11<T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18>) = Arity18(this._1, this._2, this._3, this._4, this._5, this._6, this._7, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18> Arity8<T1, T2, T3, T4, T5, T6, T7, T8>.plus(that: Arity10<T9, T10, T11, T12, T13, T14, T15, T16, T17, T18>) = Arity18(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18> Arity9<T1, T2, T3, T4, T5, T6, T7, T8, T9>.plus(that: Arity9<T10, T11, T12, T13, T14, T15, T16, T17, T18>) = Arity18(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18> Arity10<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10>.plus(that: Arity8<T11, T12, T13, T14, T15, T16, T17, T18>) = Arity18(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18> Arity11<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11>.plus(that: Arity7<T12, T13, T14, T15, T16, T17, T18>) = Arity18(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, that._1, that._2, that._3, that._4, that._5, that._6, that._7)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18> Arity12<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12>.plus(that: Arity6<T13, T14, T15, T16, T17, T18>) = Arity18(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, that._1, that._2, that._3, that._4, that._5, that._6)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18> Arity13<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13>.plus(that: Arity5<T14, T15, T16, T17, T18>) = Arity18(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, that._1, that._2, that._3, that._4, that._5)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18> Arity14<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14>.plus(that: Arity4<T15, T16, T17, T18>) = Arity18(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, that._1, that._2, that._3, that._4)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18> Arity15<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15>.plus(that: Arity3<T16, T17, T18>) = Arity18(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, this._15, that._1, that._2, that._3)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18> Arity16<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16>.plus(that: Arity2<T17, T18>) = Arity18(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, this._15, this._16, that._1, that._2)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18> Arity17<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17>.plus(that: Arity1<T18>) = Arity18(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, this._15, this._16, this._17, that._1)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19> Arity1<T1>.plus(that: Arity18<T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19>) = Arity19(this._1, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13, that._14, that._15, that._16, that._17, that._18)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19> Arity2<T1, T2>.plus(that: Arity17<T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19>) = Arity19(this._1, this._2, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13, that._14, that._15, that._16, that._17)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19> Arity3<T1, T2, T3>.plus(that: Arity16<T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19>) = Arity19(this._1, this._2, this._3, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13, that._14, that._15, that._16)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19> Arity4<T1, T2, T3, T4>.plus(that: Arity15<T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19>) = Arity19(this._1, this._2, this._3, this._4, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13, that._14, that._15)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19> Arity5<T1, T2, T3, T4, T5>.plus(that: Arity14<T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19>) = Arity19(this._1, this._2, this._3, this._4, this._5, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13, that._14)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19> Arity6<T1, T2, T3, T4, T5, T6>.plus(that: Arity13<T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19>) = Arity19(this._1, this._2, this._3, this._4, this._5, this._6, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19> Arity7<T1, T2, T3, T4, T5, T6, T7>.plus(that: Arity12<T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19>) = Arity19(this._1, this._2, this._3, this._4, this._5, this._6, this._7, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19> Arity8<T1, T2, T3, T4, T5, T6, T7, T8>.plus(that: Arity11<T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19>) = Arity19(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19> Arity9<T1, T2, T3, T4, T5, T6, T7, T8, T9>.plus(that: Arity10<T10, T11, T12, T13, T14, T15, T16, T17, T18, T19>) = Arity19(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19> Arity10<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10>.plus(that: Arity9<T11, T12, T13, T14, T15, T16, T17, T18, T19>) = Arity19(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19> Arity11<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11>.plus(that: Arity8<T12, T13, T14, T15, T16, T17, T18, T19>) = Arity19(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19> Arity12<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12>.plus(that: Arity7<T13, T14, T15, T16, T17, T18, T19>) = Arity19(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, that._1, that._2, that._3, that._4, that._5, that._6, that._7)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19> Arity13<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13>.plus(that: Arity6<T14, T15, T16, T17, T18, T19>) = Arity19(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, that._1, that._2, that._3, that._4, that._5, that._6)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19> Arity14<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14>.plus(that: Arity5<T15, T16, T17, T18, T19>) = Arity19(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, that._1, that._2, that._3, that._4, that._5)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19> Arity15<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15>.plus(that: Arity4<T16, T17, T18, T19>) = Arity19(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, this._15, that._1, that._2, that._3, that._4)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19> Arity16<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16>.plus(that: Arity3<T17, T18, T19>) = Arity19(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, this._15, this._16, that._1, that._2, that._3)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19> Arity17<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17>.plus(that: Arity2<T18, T19>) = Arity19(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, this._15, this._16, this._17, that._1, that._2)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19> Arity18<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18>.plus(that: Arity1<T19>) = Arity19(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, this._15, this._16, this._17, this._18, that._1)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20> Arity1<T1>.plus(that: Arity19<T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20>) = Arity20(this._1, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13, that._14, that._15, that._16, that._17, that._18, that._19)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20> Arity2<T1, T2>.plus(that: Arity18<T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20>) = Arity20(this._1, this._2, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13, that._14, that._15, that._16, that._17, that._18)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20> Arity3<T1, T2, T3>.plus(that: Arity17<T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20>) = Arity20(this._1, this._2, this._3, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13, that._14, that._15, that._16, that._17)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20> Arity4<T1, T2, T3, T4>.plus(that: Arity16<T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20>) = Arity20(this._1, this._2, this._3, this._4, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13, that._14, that._15, that._16)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20> Arity5<T1, T2, T3, T4, T5>.plus(that: Arity15<T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20>) = Arity20(this._1, this._2, this._3, this._4, this._5, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13, that._14, that._15)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20> Arity6<T1, T2, T3, T4, T5, T6>.plus(that: Arity14<T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20>) = Arity20(this._1, this._2, this._3, this._4, this._5, this._6, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13, that._14)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20> Arity7<T1, T2, T3, T4, T5, T6, T7>.plus(that: Arity13<T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20>) = Arity20(this._1, this._2, this._3, this._4, this._5, this._6, this._7, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20> Arity8<T1, T2, T3, T4, T5, T6, T7, T8>.plus(that: Arity12<T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20>) = Arity20(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20> Arity9<T1, T2, T3, T4, T5, T6, T7, T8, T9>.plus(that: Arity11<T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20>) = Arity20(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20> Arity10<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10>.plus(that: Arity10<T11, T12, T13, T14, T15, T16, T17, T18, T19, T20>) = Arity20(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20> Arity11<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11>.plus(that: Arity9<T12, T13, T14, T15, T16, T17, T18, T19, T20>) = Arity20(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20> Arity12<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12>.plus(that: Arity8<T13, T14, T15, T16, T17, T18, T19, T20>) = Arity20(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20> Arity13<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13>.plus(that: Arity7<T14, T15, T16, T17, T18, T19, T20>) = Arity20(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, that._1, that._2, that._3, that._4, that._5, that._6, that._7)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20> Arity14<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14>.plus(that: Arity6<T15, T16, T17, T18, T19, T20>) = Arity20(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, that._1, that._2, that._3, that._4, that._5, that._6)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20> Arity15<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15>.plus(that: Arity5<T16, T17, T18, T19, T20>) = Arity20(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, this._15, that._1, that._2, that._3, that._4, that._5)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20> Arity16<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16>.plus(that: Arity4<T17, T18, T19, T20>) = Arity20(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, this._15, this._16, that._1, that._2, that._3, that._4)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20> Arity17<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17>.plus(that: Arity3<T18, T19, T20>) = Arity20(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, this._15, this._16, this._17, that._1, that._2, that._3)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20> Arity18<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18>.plus(that: Arity2<T19, T20>) = Arity20(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, this._15, this._16, this._17, this._18, that._1, that._2)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20> Arity19<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19>.plus(that: Arity1<T20>) = Arity20(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, this._15, this._16, this._17, this._18, this._19, that._1)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21> Arity1<T1>.plus(that: Arity20<T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21>) = Arity21(this._1, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13, that._14, that._15, that._16, that._17, that._18, that._19, that._20)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21> Arity2<T1, T2>.plus(that: Arity19<T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21>) = Arity21(this._1, this._2, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13, that._14, that._15, that._16, that._17, that._18, that._19)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21> Arity3<T1, T2, T3>.plus(that: Arity18<T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21>) = Arity21(this._1, this._2, this._3, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13, that._14, that._15, that._16, that._17, that._18)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21> Arity4<T1, T2, T3, T4>.plus(that: Arity17<T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21>) = Arity21(this._1, this._2, this._3, this._4, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13, that._14, that._15, that._16, that._17)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21> Arity5<T1, T2, T3, T4, T5>.plus(that: Arity16<T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21>) = Arity21(this._1, this._2, this._3, this._4, this._5, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13, that._14, that._15, that._16)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21> Arity6<T1, T2, T3, T4, T5, T6>.plus(that: Arity15<T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21>) = Arity21(this._1, this._2, this._3, this._4, this._5, this._6, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13, that._14, that._15)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21> Arity7<T1, T2, T3, T4, T5, T6, T7>.plus(that: Arity14<T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21>) = Arity21(this._1, this._2, this._3, this._4, this._5, this._6, this._7, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13, that._14)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21> Arity8<T1, T2, T3, T4, T5, T6, T7, T8>.plus(that: Arity13<T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21>) = Arity21(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21> Arity9<T1, T2, T3, T4, T5, T6, T7, T8, T9>.plus(that: Arity12<T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21>) = Arity21(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21> Arity10<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10>.plus(that: Arity11<T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21>) = Arity21(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21> Arity11<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11>.plus(that: Arity10<T12, T13, T14, T15, T16, T17, T18, T19, T20, T21>) = Arity21(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21> Arity12<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12>.plus(that: Arity9<T13, T14, T15, T16, T17, T18, T19, T20, T21>) = Arity21(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21> Arity13<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13>.plus(that: Arity8<T14, T15, T16, T17, T18, T19, T20, T21>) = Arity21(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21> Arity14<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14>.plus(that: Arity7<T15, T16, T17, T18, T19, T20, T21>) = Arity21(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, that._1, that._2, that._3, that._4, that._5, that._6, that._7)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21> Arity15<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15>.plus(that: Arity6<T16, T17, T18, T19, T20, T21>) = Arity21(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, this._15, that._1, that._2, that._3, that._4, that._5, that._6)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21> Arity16<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16>.plus(that: Arity5<T17, T18, T19, T20, T21>) = Arity21(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, this._15, this._16, that._1, that._2, that._3, that._4, that._5)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21> Arity17<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17>.plus(that: Arity4<T18, T19, T20, T21>) = Arity21(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, this._15, this._16, this._17, that._1, that._2, that._3, that._4)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21> Arity18<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18>.plus(that: Arity3<T19, T20, T21>) = Arity21(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, this._15, this._16, this._17, this._18, that._1, that._2, that._3)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21> Arity19<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19>.plus(that: Arity2<T20, T21>) = Arity21(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, this._15, this._16, this._17, this._18, this._19, that._1, that._2)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21> Arity20<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20>.plus(that: Arity1<T21>) = Arity21(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, this._15, this._16, this._17, this._18, this._19, this._20, that._1)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22> Arity1<T1>.plus(that: Arity21<T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22>) = Arity22(this._1, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13, that._14, that._15, that._16, that._17, that._18, that._19, that._20, that._21)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22> Arity2<T1, T2>.plus(that: Arity20<T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22>) = Arity22(this._1, this._2, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13, that._14, that._15, that._16, that._17, that._18, that._19, that._20)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22> Arity3<T1, T2, T3>.plus(that: Arity19<T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22>) = Arity22(this._1, this._2, this._3, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13, that._14, that._15, that._16, that._17, that._18, that._19)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22> Arity4<T1, T2, T3, T4>.plus(that: Arity18<T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22>) = Arity22(this._1, this._2, this._3, this._4, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13, that._14, that._15, that._16, that._17, that._18)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22> Arity5<T1, T2, T3, T4, T5>.plus(that: Arity17<T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22>) = Arity22(this._1, this._2, this._3, this._4, this._5, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13, that._14, that._15, that._16, that._17)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22> Arity6<T1, T2, T3, T4, T5, T6>.plus(that: Arity16<T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22>) = Arity22(this._1, this._2, this._3, this._4, this._5, this._6, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13, that._14, that._15, that._16)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22> Arity7<T1, T2, T3, T4, T5, T6, T7>.plus(that: Arity15<T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22>) = Arity22(this._1, this._2, this._3, this._4, this._5, this._6, this._7, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13, that._14, that._15)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22> Arity8<T1, T2, T3, T4, T5, T6, T7, T8>.plus(that: Arity14<T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22>) = Arity22(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13, that._14)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22> Arity9<T1, T2, T3, T4, T5, T6, T7, T8, T9>.plus(that: Arity13<T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22>) = Arity22(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22> Arity10<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10>.plus(that: Arity12<T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22>) = Arity22(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22> Arity11<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11>.plus(that: Arity11<T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22>) = Arity22(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22> Arity12<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12>.plus(that: Arity10<T13, T14, T15, T16, T17, T18, T19, T20, T21, T22>) = Arity22(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22> Arity13<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13>.plus(that: Arity9<T14, T15, T16, T17, T18, T19, T20, T21, T22>) = Arity22(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22> Arity14<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14>.plus(that: Arity8<T15, T16, T17, T18, T19, T20, T21, T22>) = Arity22(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22> Arity15<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15>.plus(that: Arity7<T16, T17, T18, T19, T20, T21, T22>) = Arity22(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, this._15, that._1, that._2, that._3, that._4, that._5, that._6, that._7)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22> Arity16<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16>.plus(that: Arity6<T17, T18, T19, T20, T21, T22>) = Arity22(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, this._15, this._16, that._1, that._2, that._3, that._4, that._5, that._6)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22> Arity17<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17>.plus(that: Arity5<T18, T19, T20, T21, T22>) = Arity22(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, this._15, this._16, this._17, that._1, that._2, that._3, that._4, that._5)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22> Arity18<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18>.plus(that: Arity4<T19, T20, T21, T22>) = Arity22(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, this._15, this._16, this._17, this._18, that._1, that._2, that._3, that._4)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22> Arity19<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19>.plus(that: Arity3<T20, T21, T22>) = Arity22(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, this._15, this._16, this._17, this._18, this._19, that._1, that._2, that._3)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22> Arity20<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20>.plus(that: Arity2<T21, T22>) = Arity22(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, this._15, this._16, this._17, this._18, this._19, this._20, that._1, that._2)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22> Arity21<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21>.plus(that: Arity1<T22>) = Arity22(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, this._15, this._16, this._17, this._18, this._19, this._20, this._21, that._1)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23> Arity1<T1>.plus(that: Arity22<T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23>) = Arity23(this._1, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13, that._14, that._15, that._16, that._17, that._18, that._19, that._20, that._21, that._22)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23> Arity2<T1, T2>.plus(that: Arity21<T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23>) = Arity23(this._1, this._2, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13, that._14, that._15, that._16, that._17, that._18, that._19, that._20, that._21)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23> Arity3<T1, T2, T3>.plus(that: Arity20<T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23>) = Arity23(this._1, this._2, this._3, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13, that._14, that._15, that._16, that._17, that._18, that._19, that._20)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23> Arity4<T1, T2, T3, T4>.plus(that: Arity19<T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23>) = Arity23(this._1, this._2, this._3, this._4, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13, that._14, that._15, that._16, that._17, that._18, that._19)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23> Arity5<T1, T2, T3, T4, T5>.plus(that: Arity18<T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23>) = Arity23(this._1, this._2, this._3, this._4, this._5, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13, that._14, that._15, that._16, that._17, that._18)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23> Arity6<T1, T2, T3, T4, T5, T6>.plus(that: Arity17<T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23>) = Arity23(this._1, this._2, this._3, this._4, this._5, this._6, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13, that._14, that._15, that._16, that._17)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23> Arity7<T1, T2, T3, T4, T5, T6, T7>.plus(that: Arity16<T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23>) = Arity23(this._1, this._2, this._3, this._4, this._5, this._6, this._7, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13, that._14, that._15, that._16)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23> Arity8<T1, T2, T3, T4, T5, T6, T7, T8>.plus(that: Arity15<T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23>) = Arity23(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13, that._14, that._15)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23> Arity9<T1, T2, T3, T4, T5, T6, T7, T8, T9>.plus(that: Arity14<T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23>) = Arity23(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13, that._14)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23> Arity10<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10>.plus(that: Arity13<T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23>) = Arity23(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23> Arity11<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11>.plus(that: Arity12<T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23>) = Arity23(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23> Arity12<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12>.plus(that: Arity11<T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23>) = Arity23(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23> Arity13<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13>.plus(that: Arity10<T14, T15, T16, T17, T18, T19, T20, T21, T22, T23>) = Arity23(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23> Arity14<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14>.plus(that: Arity9<T15, T16, T17, T18, T19, T20, T21, T22, T23>) = Arity23(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23> Arity15<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15>.plus(that: Arity8<T16, T17, T18, T19, T20, T21, T22, T23>) = Arity23(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, this._15, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23> Arity16<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16>.plus(that: Arity7<T17, T18, T19, T20, T21, T22, T23>) = Arity23(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, this._15, this._16, that._1, that._2, that._3, that._4, that._5, that._6, that._7)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23> Arity17<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17>.plus(that: Arity6<T18, T19, T20, T21, T22, T23>) = Arity23(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, this._15, this._16, this._17, that._1, that._2, that._3, that._4, that._5, that._6)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23> Arity18<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18>.plus(that: Arity5<T19, T20, T21, T22, T23>) = Arity23(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, this._15, this._16, this._17, this._18, that._1, that._2, that._3, that._4, that._5)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23> Arity19<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19>.plus(that: Arity4<T20, T21, T22, T23>) = Arity23(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, this._15, this._16, this._17, this._18, this._19, that._1, that._2, that._3, that._4)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23> Arity20<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20>.plus(that: Arity3<T21, T22, T23>) = Arity23(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, this._15, this._16, this._17, this._18, this._19, this._20, that._1, that._2, that._3)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23> Arity21<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21>.plus(that: Arity2<T22, T23>) = Arity23(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, this._15, this._16, this._17, this._18, this._19, this._20, this._21, that._1, that._2)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23> Arity22<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22>.plus(that: Arity1<T23>) = Arity23(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, this._15, this._16, this._17, this._18, this._19, this._20, this._21, this._22, that._1)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24> Arity1<T1>.plus(that: Arity23<T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24>) = Arity24(this._1, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13, that._14, that._15, that._16, that._17, that._18, that._19, that._20, that._21, that._22, that._23)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24> Arity2<T1, T2>.plus(that: Arity22<T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24>) = Arity24(this._1, this._2, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13, that._14, that._15, that._16, that._17, that._18, that._19, that._20, that._21, that._22)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24> Arity3<T1, T2, T3>.plus(that: Arity21<T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24>) = Arity24(this._1, this._2, this._3, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13, that._14, that._15, that._16, that._17, that._18, that._19, that._20, that._21)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24> Arity4<T1, T2, T3, T4>.plus(that: Arity20<T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24>) = Arity24(this._1, this._2, this._3, this._4, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13, that._14, that._15, that._16, that._17, that._18, that._19, that._20)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24> Arity5<T1, T2, T3, T4, T5>.plus(that: Arity19<T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24>) = Arity24(this._1, this._2, this._3, this._4, this._5, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13, that._14, that._15, that._16, that._17, that._18, that._19)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24> Arity6<T1, T2, T3, T4, T5, T6>.plus(that: Arity18<T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24>) = Arity24(this._1, this._2, this._3, this._4, this._5, this._6, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13, that._14, that._15, that._16, that._17, that._18)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24> Arity7<T1, T2, T3, T4, T5, T6, T7>.plus(that: Arity17<T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24>) = Arity24(this._1, this._2, this._3, this._4, this._5, this._6, this._7, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13, that._14, that._15, that._16, that._17)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24> Arity8<T1, T2, T3, T4, T5, T6, T7, T8>.plus(that: Arity16<T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24>) = Arity24(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13, that._14, that._15, that._16)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24> Arity9<T1, T2, T3, T4, T5, T6, T7, T8, T9>.plus(that: Arity15<T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24>) = Arity24(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13, that._14, that._15)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24> Arity10<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10>.plus(that: Arity14<T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24>) = Arity24(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13, that._14)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24> Arity11<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11>.plus(that: Arity13<T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24>) = Arity24(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24> Arity12<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12>.plus(that: Arity12<T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24>) = Arity24(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24> Arity13<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13>.plus(that: Arity11<T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24>) = Arity24(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24> Arity14<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14>.plus(that: Arity10<T15, T16, T17, T18, T19, T20, T21, T22, T23, T24>) = Arity24(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24> Arity15<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15>.plus(that: Arity9<T16, T17, T18, T19, T20, T21, T22, T23, T24>) = Arity24(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, this._15, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24> Arity16<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16>.plus(that: Arity8<T17, T18, T19, T20, T21, T22, T23, T24>) = Arity24(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, this._15, this._16, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24> Arity17<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17>.plus(that: Arity7<T18, T19, T20, T21, T22, T23, T24>) = Arity24(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, this._15, this._16, this._17, that._1, that._2, that._3, that._4, that._5, that._6, that._7)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24> Arity18<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18>.plus(that: Arity6<T19, T20, T21, T22, T23, T24>) = Arity24(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, this._15, this._16, this._17, this._18, that._1, that._2, that._3, that._4, that._5, that._6)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24> Arity19<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19>.plus(that: Arity5<T20, T21, T22, T23, T24>) = Arity24(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, this._15, this._16, this._17, this._18, this._19, that._1, that._2, that._3, that._4, that._5)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24> Arity20<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20>.plus(that: Arity4<T21, T22, T23, T24>) = Arity24(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, this._15, this._16, this._17, this._18, this._19, this._20, that._1, that._2, that._3, that._4)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24> Arity21<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21>.plus(that: Arity3<T22, T23, T24>) = Arity24(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, this._15, this._16, this._17, this._18, this._19, this._20, this._21, that._1, that._2, that._3)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24> Arity22<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22>.plus(that: Arity2<T23, T24>) = Arity24(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, this._15, this._16, this._17, this._18, this._19, this._20, this._21, this._22, that._1, that._2)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24> Arity23<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23>.plus(that: Arity1<T24>) = Arity24(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, this._15, this._16, this._17, this._18, this._19, this._20, this._21, this._22, this._23, that._1)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25> Arity1<T1>.plus(that: Arity24<T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25>) = Arity25(this._1, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13, that._14, that._15, that._16, that._17, that._18, that._19, that._20, that._21, that._22, that._23, that._24)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25> Arity2<T1, T2>.plus(that: Arity23<T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25>) = Arity25(this._1, this._2, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13, that._14, that._15, that._16, that._17, that._18, that._19, that._20, that._21, that._22, that._23)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25> Arity3<T1, T2, T3>.plus(that: Arity22<T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25>) = Arity25(this._1, this._2, this._3, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13, that._14, that._15, that._16, that._17, that._18, that._19, that._20, that._21, that._22)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25> Arity4<T1, T2, T3, T4>.plus(that: Arity21<T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25>) = Arity25(this._1, this._2, this._3, this._4, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13, that._14, that._15, that._16, that._17, that._18, that._19, that._20, that._21)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25> Arity5<T1, T2, T3, T4, T5>.plus(that: Arity20<T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25>) = Arity25(this._1, this._2, this._3, this._4, this._5, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13, that._14, that._15, that._16, that._17, that._18, that._19, that._20)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25> Arity6<T1, T2, T3, T4, T5, T6>.plus(that: Arity19<T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25>) = Arity25(this._1, this._2, this._3, this._4, this._5, this._6, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13, that._14, that._15, that._16, that._17, that._18, that._19)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25> Arity7<T1, T2, T3, T4, T5, T6, T7>.plus(that: Arity18<T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25>) = Arity25(this._1, this._2, this._3, this._4, this._5, this._6, this._7, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13, that._14, that._15, that._16, that._17, that._18)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25> Arity8<T1, T2, T3, T4, T5, T6, T7, T8>.plus(that: Arity17<T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25>) = Arity25(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13, that._14, that._15, that._16, that._17)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25> Arity9<T1, T2, T3, T4, T5, T6, T7, T8, T9>.plus(that: Arity16<T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25>) = Arity25(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13, that._14, that._15, that._16)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25> Arity10<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10>.plus(that: Arity15<T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25>) = Arity25(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13, that._14, that._15)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25> Arity11<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11>.plus(that: Arity14<T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25>) = Arity25(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13, that._14)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25> Arity12<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12>.plus(that: Arity13<T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25>) = Arity25(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25> Arity13<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13>.plus(that: Arity12<T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25>) = Arity25(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25> Arity14<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14>.plus(that: Arity11<T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25>) = Arity25(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25> Arity15<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15>.plus(that: Arity10<T16, T17, T18, T19, T20, T21, T22, T23, T24, T25>) = Arity25(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, this._15, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25> Arity16<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16>.plus(that: Arity9<T17, T18, T19, T20, T21, T22, T23, T24, T25>) = Arity25(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, this._15, this._16, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25> Arity17<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17>.plus(that: Arity8<T18, T19, T20, T21, T22, T23, T24, T25>) = Arity25(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, this._15, this._16, this._17, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25> Arity18<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18>.plus(that: Arity7<T19, T20, T21, T22, T23, T24, T25>) = Arity25(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, this._15, this._16, this._17, this._18, that._1, that._2, that._3, that._4, that._5, that._6, that._7)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25> Arity19<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19>.plus(that: Arity6<T20, T21, T22, T23, T24, T25>) = Arity25(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, this._15, this._16, this._17, this._18, this._19, that._1, that._2, that._3, that._4, that._5, that._6)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25> Arity20<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20>.plus(that: Arity5<T21, T22, T23, T24, T25>) = Arity25(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, this._15, this._16, this._17, this._18, this._19, this._20, that._1, that._2, that._3, that._4, that._5)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25> Arity21<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21>.plus(that: Arity4<T22, T23, T24, T25>) = Arity25(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, this._15, this._16, this._17, this._18, this._19, this._20, this._21, that._1, that._2, that._3, that._4)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25> Arity22<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22>.plus(that: Arity3<T23, T24, T25>) = Arity25(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, this._15, this._16, this._17, this._18, this._19, this._20, this._21, this._22, that._1, that._2, that._3)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25> Arity23<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23>.plus(that: Arity2<T24, T25>) = Arity25(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, this._15, this._16, this._17, this._18, this._19, this._20, this._21, this._22, this._23, that._1, that._2)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25> Arity24<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24>.plus(that: Arity1<T25>) = Arity25(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, this._15, this._16, this._17, this._18, this._19, this._20, this._21, this._22, this._23, this._24, that._1)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26> Arity1<T1>.plus(that: Arity25<T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26>) = Arity26(this._1, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13, that._14, that._15, that._16, that._17, that._18, that._19, that._20, that._21, that._22, that._23, that._24, that._25)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26> Arity2<T1, T2>.plus(that: Arity24<T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26>) = Arity26(this._1, this._2, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13, that._14, that._15, that._16, that._17, that._18, that._19, that._20, that._21, that._22, that._23, that._24)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26> Arity3<T1, T2, T3>.plus(that: Arity23<T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26>) = Arity26(this._1, this._2, this._3, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13, that._14, that._15, that._16, that._17, that._18, that._19, that._20, that._21, that._22, that._23)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26> Arity4<T1, T2, T3, T4>.plus(that: Arity22<T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26>) = Arity26(this._1, this._2, this._3, this._4, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13, that._14, that._15, that._16, that._17, that._18, that._19, that._20, that._21, that._22)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26> Arity5<T1, T2, T3, T4, T5>.plus(that: Arity21<T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26>) = Arity26(this._1, this._2, this._3, this._4, this._5, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13, that._14, that._15, that._16, that._17, that._18, that._19, that._20, that._21)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26> Arity6<T1, T2, T3, T4, T5, T6>.plus(that: Arity20<T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26>) = Arity26(this._1, this._2, this._3, this._4, this._5, this._6, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13, that._14, that._15, that._16, that._17, that._18, that._19, that._20)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26> Arity7<T1, T2, T3, T4, T5, T6, T7>.plus(that: Arity19<T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26>) = Arity26(this._1, this._2, this._3, this._4, this._5, this._6, this._7, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13, that._14, that._15, that._16, that._17, that._18, that._19)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26> Arity8<T1, T2, T3, T4, T5, T6, T7, T8>.plus(that: Arity18<T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26>) = Arity26(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13, that._14, that._15, that._16, that._17, that._18)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26> Arity9<T1, T2, T3, T4, T5, T6, T7, T8, T9>.plus(that: Arity17<T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26>) = Arity26(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13, that._14, that._15, that._16, that._17)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26> Arity10<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10>.plus(that: Arity16<T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26>) = Arity26(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13, that._14, that._15, that._16)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26> Arity11<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11>.plus(that: Arity15<T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26>) = Arity26(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13, that._14, that._15)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26> Arity12<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12>.plus(that: Arity14<T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26>) = Arity26(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13, that._14)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26> Arity13<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13>.plus(that: Arity13<T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26>) = Arity26(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26> Arity14<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14>.plus(that: Arity12<T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26>) = Arity26(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26> Arity15<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15>.plus(that: Arity11<T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26>) = Arity26(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, this._15, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26> Arity16<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16>.plus(that: Arity10<T17, T18, T19, T20, T21, T22, T23, T24, T25, T26>) = Arity26(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, this._15, this._16, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26> Arity17<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17>.plus(that: Arity9<T18, T19, T20, T21, T22, T23, T24, T25, T26>) = Arity26(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, this._15, this._16, this._17, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26> Arity18<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18>.plus(that: Arity8<T19, T20, T21, T22, T23, T24, T25, T26>) = Arity26(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, this._15, this._16, this._17, this._18, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26> Arity19<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19>.plus(that: Arity7<T20, T21, T22, T23, T24, T25, T26>) = Arity26(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, this._15, this._16, this._17, this._18, this._19, that._1, that._2, that._3, that._4, that._5, that._6, that._7)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26> Arity20<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20>.plus(that: Arity6<T21, T22, T23, T24, T25, T26>) = Arity26(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, this._15, this._16, this._17, this._18, this._19, this._20, that._1, that._2, that._3, that._4, that._5, that._6)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26> Arity21<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21>.plus(that: Arity5<T22, T23, T24, T25, T26>) = Arity26(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, this._15, this._16, this._17, this._18, this._19, this._20, this._21, that._1, that._2, that._3, that._4, that._5)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26> Arity22<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22>.plus(that: Arity4<T23, T24, T25, T26>) = Arity26(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, this._15, this._16, this._17, this._18, this._19, this._20, this._21, this._22, that._1, that._2, that._3, that._4)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26> Arity23<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23>.plus(that: Arity3<T24, T25, T26>) = Arity26(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, this._15, this._16, this._17, this._18, this._19, this._20, this._21, this._22, this._23, that._1, that._2, that._3)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26> Arity24<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24>.plus(that: Arity2<T25, T26>) = Arity26(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, this._15, this._16, this._17, this._18, this._19, this._20, this._21, this._22, this._23, this._24, that._1, that._2)
-@Deprecated("Use Scala tuples instead.")
-infix operator fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25, T26> Arity25<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, T23, T24, T25>.plus(that: Arity1<T26>) = Arity26(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, this._15, this._16, this._17, this._18, this._19, this._20, this._21, this._22, this._23, this._24, this._25, that._1)
diff --git a/kotlin-spark-api/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Column.kt b/kotlin-spark-api/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Column.kt
index bb0d6d20..d962f57c 100644
--- a/kotlin-spark-api/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Column.kt
+++ b/kotlin-spark-api/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Column.kt
@@ -443,7 +443,7 @@ operator fun Column.get(key: Any): Column = getItem(key)
  * @see typed
  */
 @Suppress("UNCHECKED_CAST")
-inline fun <DsType, reified U> Column.`as`(): TypedColumn<DsType, U> = `as`(encoder<U>()) as TypedColumn<DsType, U>
+inline fun <DsType, reified U> Column.`as`(): TypedColumn<DsType, U> = `as`(kotlinEncoderFor<U>()) as TypedColumn<DsType, U>
 
 /**
  * Provides a type hint about the expected return value of this column. This information can
@@ -458,7 +458,7 @@ inline fun <DsType, reified U> Column.`as`(): TypedColumn<DsType, U> = `as`(enco
  * @see typed
  */
 @Suppress("UNCHECKED_CAST")
-inline fun <DsType, reified U> TypedColumn<DsType, *>.`as`(): TypedColumn<DsType, U> = `as`(encoder<U>()) as TypedColumn<DsType, U>
+inline fun <DsType, reified U> TypedColumn<DsType, *>.`as`(): TypedColumn<DsType, U> = `as`(kotlinEncoderFor<U>()) as TypedColumn<DsType, U>
 
 /**
  * Provides a type hint about the expected return value of this column. This information can
diff --git a/kotlin-spark-api/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Conversions.kt b/kotlin-spark-api/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Conversions.kt
index 12b81a0b..44da4147 100644
--- a/kotlin-spark-api/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Conversions.kt
+++ b/kotlin-spark-api/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Conversions.kt
@@ -36,6 +36,7 @@ import scala.collection.Iterable as ScalaIterable
 import scala.collection.Iterator as ScalaIterator
 import scala.collection.Map as ScalaMap
 import scala.collection.Seq as ScalaSeq
+import scala.collection.immutable.Seq as ScalaImmutableSeq
 import scala.collection.Set as ScalaSet
 import scala.collection.concurrent.Map as ScalaConcurrentMap
 import scala.collection.mutable.Buffer as ScalaMutableBuffer
@@ -124,6 +125,18 @@ fun <A> Collection<A>.asScalaIterable(): ScalaIterable<A> =
     //$scala.collection.JavaConverters.collectionAsScalaIterable<A>(this)
     //#endif
 
+//#if scalaCompat >= 2.13
+/** @see scala.jdk.javaapi.CollectionConverters.asScala for more information. */
+//#else
+//$/** @see scala.collection.JavaConverters.iterableAsScalaIterable for more information. */
+//#endif
+fun <A> Iterable<A>.asScalaSeq(): ScalaImmutableSeq<A> =
+    //#if scalaCompat >= 2.13
+    scala.jdk.javaapi.CollectionConverters.asScala<A>(this).toSeq()
+    //#else
+    //$scala.collection.immutable.`Seq$`.`MODULE$`.apply<A>(scala.collection.JavaConverters.iterableAsScalaIterable<A>(this).toSeq()) as ScalaImmutableSeq<A>
+    //#endif
+
 //#if scalaCompat >= 2.13
 /** @see scala.jdk.javaapi.CollectionConverters.asScala for more information. */
 //#else
@@ -363,803 +376,3 @@ fun <A, B> ScalaConcurrentMap<A, B>.asKotlinConcurrentMap(): ConcurrentMap<A, B>
     //#else
     //$scala.collection.JavaConverters.mapAsJavaConcurrentMap<A, B>(this)
     //#endif
-
-
-/**
- * Returns a new [Arity2] based on the arguments in the current [Pair].
- */
-@Deprecated("Use Scala tuples instead.", ReplaceWith("this.toTuple()", "scala.Tuple2"))
-fun <T1, T2> Pair<T1, T2>.toArity(): Arity2<T1, T2> = Arity2<T1, T2>(first, second)
-
-/**
- * Returns a new [Pair] based on the arguments in the current [Arity2].
- */
-@Deprecated("Use Scala tuples instead.", ReplaceWith(""))
-fun <T1, T2> Arity2<T1, T2>.toPair(): Pair<T1, T2> = Pair<T1, T2>(_1, _2)
-
-/**
- * Returns a new [Arity3] based on the arguments in the current [Triple].
- */
-@Deprecated("Use Scala tuples instead.", ReplaceWith("this.toTuple()", "scala.Tuple3"))
-fun <T1, T2, T3> Triple<T1, T2, T3>.toArity(): Arity3<T1, T2, T3> = Arity3<T1, T2, T3>(first, second, third)
-
-/**
- * Returns a new [Triple] based on the arguments in the current [Arity3].
- */
-@Deprecated("Use Scala tuples instead.", ReplaceWith(""))
-fun <T1, T2, T3> Arity3<T1, T2, T3>.toTriple(): Triple<T1, T2, T3> = Triple<T1, T2, T3>(_1, _2, _3)
-
-
-/**
- * Returns a new Arity1 based on this Tuple1.
- **/
-@Deprecated("Use Scala tuples instead.", ReplaceWith(""))
-fun <T1> Tuple1<T1>.toArity(): Arity1<T1> = Arity1<T1>(this._1())
-
-/**
- * Returns a new Arity2 based on this Tuple2.
- **/
-@Deprecated("Use Scala tuples instead.", ReplaceWith(""))
-fun <T1, T2> Tuple2<T1, T2>.toArity(): Arity2<T1, T2> = Arity2<T1, T2>(this._1(), this._2())
-
-/**
- * Returns a new Arity3 based on this Tuple3.
- **/
-@Deprecated("Use Scala tuples instead.", ReplaceWith(""))
-fun <T1, T2, T3> Tuple3<T1, T2, T3>.toArity(): Arity3<T1, T2, T3> = Arity3<T1, T2, T3>(this._1(), this._2(), this._3())
-
-/**
- * Returns a new Arity4 based on this Tuple4.
- **/
-@Deprecated("Use Scala tuples instead.", ReplaceWith(""))
-fun <T1, T2, T3, T4> Tuple4<T1, T2, T3, T4>.toArity(): Arity4<T1, T2, T3, T4> =
-    Arity4<T1, T2, T3, T4>(this._1(), this._2(), this._3(), this._4())
-
-/**
- * Returns a new Arity5 based on this Tuple5.
- **/
-@Deprecated("Use Scala tuples instead.", ReplaceWith(""))
-fun <T1, T2, T3, T4, T5> Tuple5<T1, T2, T3, T4, T5>.toArity(): Arity5<T1, T2, T3, T4, T5> =
-    Arity5<T1, T2, T3, T4, T5>(this._1(), this._2(), this._3(), this._4(), this._5())
-
-/**
- * Returns a new Arity6 based on this Tuple6.
- **/
-@Deprecated("Use Scala tuples instead.", ReplaceWith(""))
-fun <T1, T2, T3, T4, T5, T6> Tuple6<T1, T2, T3, T4, T5, T6>.toArity(): Arity6<T1, T2, T3, T4, T5, T6> =
-    Arity6<T1, T2, T3, T4, T5, T6>(this._1(), this._2(), this._3(), this._4(), this._5(), this._6())
-
-/**
- * Returns a new Arity7 based on this Tuple7.
- **/
-@Deprecated("Use Scala tuples instead.", ReplaceWith(""))
-fun <T1, T2, T3, T4, T5, T6, T7> Tuple7<T1, T2, T3, T4, T5, T6, T7>.toArity(): Arity7<T1, T2, T3, T4, T5, T6, T7> =
-    Arity7<T1, T2, T3, T4, T5, T6, T7>(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7())
-
-/**
- * Returns a new Arity8 based on this Tuple8.
- **/
-@Deprecated("Use Scala tuples instead.", ReplaceWith(""))
-fun <T1, T2, T3, T4, T5, T6, T7, T8> Tuple8<T1, T2, T3, T4, T5, T6, T7, T8>.toArity(): Arity8<T1, T2, T3, T4, T5, T6, T7, T8> =
-    Arity8<T1, T2, T3, T4, T5, T6, T7, T8>(
-        this._1(),
-        this._2(),
-        this._3(),
-        this._4(),
-        this._5(),
-        this._6(),
-        this._7(),
-        this._8()
-    )
-
-/**
- * Returns a new Arity9 based on this Tuple9.
- **/
-@Deprecated("Use Scala tuples instead.", ReplaceWith(""))
-fun <T1, T2, T3, T4, T5, T6, T7, T8, T9> Tuple9<T1, T2, T3, T4, T5, T6, T7, T8, T9>.toArity(): Arity9<T1, T2, T3, T4, T5, T6, T7, T8, T9> =
-    Arity9<T1, T2, T3, T4, T5, T6, T7, T8, T9>(
-        this._1(),
-        this._2(),
-        this._3(),
-        this._4(),
-        this._5(),
-        this._6(),
-        this._7(),
-        this._8(),
-        this._9()
-    )
-
-/**
- * Returns a new Arity10 based on this Tuple10.
- **/
-@Deprecated("Use Scala tuples instead.", ReplaceWith(""))
-fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10> Tuple10<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10>.toArity(): Arity10<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10> =
-    Arity10<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10>(
-        this._1(),
-        this._2(),
-        this._3(),
-        this._4(),
-        this._5(),
-        this._6(),
-        this._7(),
-        this._8(),
-        this._9(),
-        this._10()
-    )
-
-/**
- * Returns a new Arity11 based on this Tuple11.
- **/
-@Deprecated("Use Scala tuples instead.", ReplaceWith(""))
-fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11> Tuple11<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11>.toArity(): Arity11<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11> =
-    Arity11<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11>(
-        this._1(),
-        this._2(),
-        this._3(),
-        this._4(),
-        this._5(),
-        this._6(),
-        this._7(),
-        this._8(),
-        this._9(),
-        this._10(),
-        this._11()
-    )
-
-/**
- * Returns a new Arity12 based on this Tuple12.
- **/
-@Deprecated("Use Scala tuples instead.", ReplaceWith(""))
-fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12> Tuple12<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12>.toArity(): Arity12<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12> =
-    Arity12<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12>(
-        this._1(),
-        this._2(),
-        this._3(),
-        this._4(),
-        this._5(),
-        this._6(),
-        this._7(),
-        this._8(),
-        this._9(),
-        this._10(),
-        this._11(),
-        this._12()
-    )
-
-/**
- * Returns a new Arity13 based on this Tuple13.
- **/
-@Deprecated("Use Scala tuples instead.", ReplaceWith(""))
-fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13> Tuple13<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13>.toArity(): Arity13<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13> =
-    Arity13<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13>(
-        this._1(),
-        this._2(),
-        this._3(),
-        this._4(),
-        this._5(),
-        this._6(),
-        this._7(),
-        this._8(),
-        this._9(),
-        this._10(),
-        this._11(),
-        this._12(),
-        this._13()
-    )
-
-/**
- * Returns a new Arity14 based on this Tuple14.
- **/
-@Deprecated("Use Scala tuples instead.", ReplaceWith(""))
-fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14> Tuple14<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14>.toArity(): Arity14<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14> =
-    Arity14<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14>(
-        this._1(),
-        this._2(),
-        this._3(),
-        this._4(),
-        this._5(),
-        this._6(),
-        this._7(),
-        this._8(),
-        this._9(),
-        this._10(),
-        this._11(),
-        this._12(),
-        this._13(),
-        this._14()
-    )
-
-/**
- * Returns a new Arity15 based on this Tuple15.
- **/
-@Deprecated("Use Scala tuples instead.", ReplaceWith(""))
-
-fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15> Tuple15<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15>.toArity(): Arity15<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15> =
-    Arity15<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15>(
-        this._1(),
-        this._2(),
-        this._3(),
-        this._4(),
-        this._5(),
-        this._6(),
-        this._7(),
-        this._8(),
-        this._9(),
-        this._10(),
-        this._11(),
-        this._12(),
-        this._13(),
-        this._14(),
-        this._15()
-    )
-
-/**
- * Returns a new Arity16 based on this Tuple16.
- **/
-@Deprecated("Use Scala tuples instead.", ReplaceWith(""))
-fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16> Tuple16<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16>.toArity(): Arity16<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16> =
-    Arity16<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16>(
-        this._1(),
-        this._2(),
-        this._3(),
-        this._4(),
-        this._5(),
-        this._6(),
-        this._7(),
-        this._8(),
-        this._9(),
-        this._10(),
-        this._11(),
-        this._12(),
-        this._13(),
-        this._14(),
-        this._15(),
-        this._16()
-    )
-
-/**
- * Returns a new Arity17 based on this Tuple17.
- **/
-@Deprecated("Use Scala tuples instead.", ReplaceWith(""))
-fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17> Tuple17<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17>.toArity(): Arity17<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17> =
-    Arity17<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17>(
-        this._1(),
-        this._2(),
-        this._3(),
-        this._4(),
-        this._5(),
-        this._6(),
-        this._7(),
-        this._8(),
-        this._9(),
-        this._10(),
-        this._11(),
-        this._12(),
-        this._13(),
-        this._14(),
-        this._15(),
-        this._16(),
-        this._17()
-    )
-
-/**
- * Returns a new Arity18 based on this Tuple18.
- **/
-@Deprecated("Use Scala tuples instead.", ReplaceWith(""))
-fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18> Tuple18<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18>.toArity(): Arity18<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18> =
-    Arity18<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18>(
-        this._1(),
-        this._2(),
-        this._3(),
-        this._4(),
-        this._5(),
-        this._6(),
-        this._7(),
-        this._8(),
-        this._9(),
-        this._10(),
-        this._11(),
-        this._12(),
-        this._13(),
-        this._14(),
-        this._15(),
-        this._16(),
-        this._17(),
-        this._18()
-    )
-
-/**
- * Returns a new Arity19 based on this Tuple19.
- **/
-@Deprecated("Use Scala tuples instead.", ReplaceWith(""))
-fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19> Tuple19<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19>.toArity(): Arity19<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19> =
-    Arity19<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19>(
-        this._1(),
-        this._2(),
-        this._3(),
-        this._4(),
-        this._5(),
-        this._6(),
-        this._7(),
-        this._8(),
-        this._9(),
-        this._10(),
-        this._11(),
-        this._12(),
-        this._13(),
-        this._14(),
-        this._15(),
-        this._16(),
-        this._17(),
-        this._18(),
-        this._19()
-    )
-
-/**
- * Returns a new Arity20 based on this Tuple20.
- **/
-@Deprecated("Use Scala tuples instead.", ReplaceWith(""))
-fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20> Tuple20<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20>.toArity(): Arity20<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20> =
-    Arity20<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20>(
-        this._1(),
-        this._2(),
-        this._3(),
-        this._4(),
-        this._5(),
-        this._6(),
-        this._7(),
-        this._8(),
-        this._9(),
-        this._10(),
-        this._11(),
-        this._12(),
-        this._13(),
-        this._14(),
-        this._15(),
-        this._16(),
-        this._17(),
-        this._18(),
-        this._19(),
-        this._20()
-    )
-
-/**
- * Returns a new Arity21 based on this Tuple21.
- **/
-@Deprecated("Use Scala tuples instead.", ReplaceWith(""))
-fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21> Tuple21<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21>.toArity(): Arity21<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21> =
-    Arity21<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21>(
-        this._1(),
-        this._2(),
-        this._3(),
-        this._4(),
-        this._5(),
-        this._6(),
-        this._7(),
-        this._8(),
-        this._9(),
-        this._10(),
-        this._11(),
-        this._12(),
-        this._13(),
-        this._14(),
-        this._15(),
-        this._16(),
-        this._17(),
-        this._18(),
-        this._19(),
-        this._20(),
-        this._21()
-    )
-
-/**
- * Returns a new Arity22 based on this Tuple22.
- **/
-@Deprecated("Use Scala tuples instead.", ReplaceWith(""))
-fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22> Tuple22<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22>.toArity(): Arity22<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22> =
-    Arity22<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22>(
-        this._1(),
-        this._2(),
-        this._3(),
-        this._4(),
-        this._5(),
-        this._6(),
-        this._7(),
-        this._8(),
-        this._9(),
-        this._10(),
-        this._11(),
-        this._12(),
-        this._13(),
-        this._14(),
-        this._15(),
-        this._16(),
-        this._17(),
-        this._18(),
-        this._19(),
-        this._20(),
-        this._21(),
-        this._22()
-    )
-
-/**
- * Returns a new Tuple1 based on this Arity1.
- **/
-@Deprecated("Use Scala tuples instead.", ReplaceWith(""))
-fun <T1> Arity1<T1>.toTuple(): Tuple1<T1> = Tuple1<T1>(this._1)
-
-/**
- * Returns a new Tuple2 based on this Arity2.
- **/
-@Deprecated("Use Scala tuples instead.", ReplaceWith(""))
-fun <T1, T2> Arity2<T1, T2>.toTuple(): Tuple2<T1, T2> = Tuple2<T1, T2>(this._1, this._2)
-
-/**
- * Returns a new Tuple3 based on this Arity3.
- **/
-@Deprecated("Use Scala tuples instead.", ReplaceWith(""))
-fun <T1, T2, T3> Arity3<T1, T2, T3>.toTuple(): Tuple3<T1, T2, T3> = Tuple3<T1, T2, T3>(this._1, this._2, this._3)
-
-/**
- * Returns a new Tuple4 based on this Arity4.
- **/
-@Deprecated("Use Scala tuples instead.", ReplaceWith(""))
-fun <T1, T2, T3, T4> Arity4<T1, T2, T3, T4>.toTuple(): Tuple4<T1, T2, T3, T4> =
-    Tuple4<T1, T2, T3, T4>(this._1, this._2, this._3, this._4)
-
-/**
- * Returns a new Tuple5 based on this Arity5.
- **/
-@Deprecated("Use Scala tuples instead.", ReplaceWith(""))
-fun <T1, T2, T3, T4, T5> Arity5<T1, T2, T3, T4, T5>.toTuple(): Tuple5<T1, T2, T3, T4, T5> =
-    Tuple5<T1, T2, T3, T4, T5>(this._1, this._2, this._3, this._4, this._5)
-
-/**
- * Returns a new Tuple6 based on this Arity6.
- **/
-@Deprecated("Use Scala tuples instead.", ReplaceWith(""))
-fun <T1, T2, T3, T4, T5, T6> Arity6<T1, T2, T3, T4, T5, T6>.toTuple(): Tuple6<T1, T2, T3, T4, T5, T6> =
-    Tuple6<T1, T2, T3, T4, T5, T6>(this._1, this._2, this._3, this._4, this._5, this._6)
-
-/**
- * Returns a new Tuple7 based on this Arity7.
- **/
-@Deprecated("Use Scala tuples instead.", ReplaceWith(""))
-fun <T1, T2, T3, T4, T5, T6, T7> Arity7<T1, T2, T3, T4, T5, T6, T7>.toTuple(): Tuple7<T1, T2, T3, T4, T5, T6, T7> =
-    Tuple7<T1, T2, T3, T4, T5, T6, T7>(this._1, this._2, this._3, this._4, this._5, this._6, this._7)
-
-/**
- * Returns a new Tuple8 based on this Arity8.
- **/
-@Deprecated("Use Scala tuples instead.", ReplaceWith(""))
-fun <T1, T2, T3, T4, T5, T6, T7, T8> Arity8<T1, T2, T3, T4, T5, T6, T7, T8>.toTuple(): Tuple8<T1, T2, T3, T4, T5, T6, T7, T8> =
-    Tuple8<T1, T2, T3, T4, T5, T6, T7, T8>(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8)
-
-/**
- * Returns a new Tuple9 based on this Arity9.
- **/
-@Deprecated("Use Scala tuples instead.", ReplaceWith(""))
-fun <T1, T2, T3, T4, T5, T6, T7, T8, T9> Arity9<T1, T2, T3, T4, T5, T6, T7, T8, T9>.toTuple(): Tuple9<T1, T2, T3, T4, T5, T6, T7, T8, T9> =
-    Tuple9<T1, T2, T3, T4, T5, T6, T7, T8, T9>(
-        this._1,
-        this._2,
-        this._3,
-        this._4,
-        this._5,
-        this._6,
-        this._7,
-        this._8,
-        this._9
-    )
-
-/**
- * Returns a new Tuple10 based on this Arity10.
- **/
-@Deprecated("Use Scala tuples instead.", ReplaceWith(""))
-fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10> Arity10<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10>.toTuple(): Tuple10<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10> =
-    Tuple10<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10>(
-        this._1,
-        this._2,
-        this._3,
-        this._4,
-        this._5,
-        this._6,
-        this._7,
-        this._8,
-        this._9,
-        this._10
-    )
-
-/**
- * Returns a new Tuple11 based on this Arity11.
- **/
-@Deprecated("Use Scala tuples instead.", ReplaceWith(""))
-fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11> Arity11<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11>.toTuple(): Tuple11<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11> =
-    Tuple11<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11>(
-        this._1,
-        this._2,
-        this._3,
-        this._4,
-        this._5,
-        this._6,
-        this._7,
-        this._8,
-        this._9,
-        this._10,
-        this._11
-    )
-
-/**
- * Returns a new Tuple12 based on this Arity12.
- **/
-@Deprecated("Use Scala tuples instead.", ReplaceWith(""))
-fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12> Arity12<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12>.toTuple(): Tuple12<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12> =
-    Tuple12<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12>(
-        this._1,
-        this._2,
-        this._3,
-        this._4,
-        this._5,
-        this._6,
-        this._7,
-        this._8,
-        this._9,
-        this._10,
-        this._11,
-        this._12
-    )
-
-/**
- * Returns a new Tuple13 based on this Arity13.
- **/
-@Deprecated("Use Scala tuples instead.", ReplaceWith(""))
-fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13> Arity13<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13>.toTuple(): Tuple13<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13> =
-    Tuple13<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13>(
-        this._1,
-        this._2,
-        this._3,
-        this._4,
-        this._5,
-        this._6,
-        this._7,
-        this._8,
-        this._9,
-        this._10,
-        this._11,
-        this._12,
-        this._13
-    )
-
-/**
- * Returns a new Tuple14 based on this Arity14.
- **/
-@Deprecated("Use Scala tuples instead.", ReplaceWith(""))
-fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14> Arity14<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14>.toTuple(): Tuple14<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14> =
-    Tuple14<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14>(
-        this._1,
-        this._2,
-        this._3,
-        this._4,
-        this._5,
-        this._6,
-        this._7,
-        this._8,
-        this._9,
-        this._10,
-        this._11,
-        this._12,
-        this._13,
-        this._14
-    )
-
-/**
- * Returns a new Tuple15 based on this Arity15.
- **/
-@Deprecated("Use Scala tuples instead.", ReplaceWith(""))
-fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15> Arity15<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15>.toTuple(): Tuple15<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15> =
-    Tuple15<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15>(
-        this._1,
-        this._2,
-        this._3,
-        this._4,
-        this._5,
-        this._6,
-        this._7,
-        this._8,
-        this._9,
-        this._10,
-        this._11,
-        this._12,
-        this._13,
-        this._14,
-        this._15
-    )
-
-/**
- * Returns a new Tuple16 based on this Arity16.
- **/
-@Deprecated("Use Scala tuples instead.", ReplaceWith(""))
-fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16> Arity16<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16>.toTuple(): Tuple16<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16> =
-    Tuple16<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16>(
-        this._1,
-        this._2,
-        this._3,
-        this._4,
-        this._5,
-        this._6,
-        this._7,
-        this._8,
-        this._9,
-        this._10,
-        this._11,
-        this._12,
-        this._13,
-        this._14,
-        this._15,
-        this._16
-    )
-
-/**
- * Returns a new Tuple17 based on this Arity17.
- **/
-@Deprecated("Use Scala tuples instead.", ReplaceWith(""))
-fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17> Arity17<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17>.toTuple(): Tuple17<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17> =
-    Tuple17<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17>(
-        this._1,
-        this._2,
-        this._3,
-        this._4,
-        this._5,
-        this._6,
-        this._7,
-        this._8,
-        this._9,
-        this._10,
-        this._11,
-        this._12,
-        this._13,
-        this._14,
-        this._15,
-        this._16,
-        this._17
-    )
-
-/**
- * Returns a new Tuple18 based on this Arity18.
- **/
-@Deprecated("Use Scala tuples instead.", ReplaceWith(""))
-fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18> Arity18<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18>.toTuple(): Tuple18<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18> =
-    Tuple18<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18>(
-        this._1,
-        this._2,
-        this._3,
-        this._4,
-        this._5,
-        this._6,
-        this._7,
-        this._8,
-        this._9,
-        this._10,
-        this._11,
-        this._12,
-        this._13,
-        this._14,
-        this._15,
-        this._16,
-        this._17,
-        this._18
-    )
-
-/**
- * Returns a new Tuple19 based on this Arity19.
- **/
-@Deprecated("Use Scala tuples instead.", ReplaceWith(""))
-fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19> Arity19<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19>.toTuple(): Tuple19<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19> =
-    Tuple19<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19>(
-        this._1,
-        this._2,
-        this._3,
-        this._4,
-        this._5,
-        this._6,
-        this._7,
-        this._8,
-        this._9,
-        this._10,
-        this._11,
-        this._12,
-        this._13,
-        this._14,
-        this._15,
-        this._16,
-        this._17,
-        this._18,
-        this._19
-    )
-
-/**
- * Returns a new Tuple20 based on this Arity20.
- **/
-@Deprecated("Use Scala tuples instead.", ReplaceWith(""))
-fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20> Arity20<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20>.toTuple(): Tuple20<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20> =
-    Tuple20<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20>(
-        this._1,
-        this._2,
-        this._3,
-        this._4,
-        this._5,
-        this._6,
-        this._7,
-        this._8,
-        this._9,
-        this._10,
-        this._11,
-        this._12,
-        this._13,
-        this._14,
-        this._15,
-        this._16,
-        this._17,
-        this._18,
-        this._19,
-        this._20
-    )
-
-/**
- * Returns a new Tuple21 based on this Arity21.
- **/
-@Deprecated("Use Scala tuples instead.", ReplaceWith(""))
-fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21> Arity21<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21>.toTuple(): Tuple21<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21> =
-    Tuple21<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21>(
-        this._1,
-        this._2,
-        this._3,
-        this._4,
-        this._5,
-        this._6,
-        this._7,
-        this._8,
-        this._9,
-        this._10,
-        this._11,
-        this._12,
-        this._13,
-        this._14,
-        this._15,
-        this._16,
-        this._17,
-        this._18,
-        this._19,
-        this._20,
-        this._21
-    )
-
-/**
- * Returns a new Tuple22 based on this Arity22.
- **/
-@Deprecated("Use Scala tuples instead.", ReplaceWith(""))
-fun <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22> Arity22<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22>.toTuple(): Tuple22<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22> =
-    Tuple22<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22>(
-        this._1,
-        this._2,
-        this._3,
-        this._4,
-        this._5,
-        this._6,
-        this._7,
-        this._8,
-        this._9,
-        this._10,
-        this._11,
-        this._12,
-        this._13,
-        this._14,
-        this._15,
-        this._16,
-        this._17,
-        this._18,
-        this._19,
-        this._20,
-        this._21,
-        this._22
-    )
diff --git a/kotlin-spark-api/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Dataset.kt b/kotlin-spark-api/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Dataset.kt
index 65a54fc7..6b7b0af1 100644
--- a/kotlin-spark-api/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Dataset.kt
+++ b/kotlin-spark-api/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Dataset.kt
@@ -34,10 +34,10 @@ import org.apache.spark.api.java.function.FlatMapFunction
 import org.apache.spark.api.java.function.ForeachFunction
 import org.apache.spark.api.java.function.ForeachPartitionFunction
 import org.apache.spark.api.java.function.MapFunction
+import org.apache.spark.api.java.function.MapPartitionsFunction
 import org.apache.spark.api.java.function.ReduceFunction
 import org.apache.spark.rdd.RDD
 import org.apache.spark.sql.*
-import org.jetbrains.kotlinx.spark.extensions.KSparkExtensions
 import scala.Tuple2
 import scala.Tuple3
 import scala.Tuple4
@@ -49,7 +49,7 @@ import kotlin.reflect.KProperty1
  * Utility method to create dataset from list
  */
 inline fun <reified T> SparkSession.toDS(list: List<T>): Dataset<T> =
-    createDataset(list, encoder<T>())
+    createDataset(list, kotlinEncoderFor<T>())
 
 /**
  * Utility method to create dataframe from list
@@ -61,26 +61,26 @@ inline fun <reified T> SparkSession.toDF(list: List<T>, vararg colNames: String)
  * Utility method to create dataset from *array or vararg arguments
  */
 inline fun <reified T> SparkSession.dsOf(vararg t: T): Dataset<T> =
-    createDataset(t.asList(), encoder<T>())
+    createDataset(t.toList(), kotlinEncoderFor<T>())
 
 /**
  * Utility method to create dataframe from *array or vararg arguments
  */
 inline fun <reified T> SparkSession.dfOf(vararg t: T): Dataset<Row> =
-    createDataset(t.asList(), encoder<T>()).toDF()
+    createDataset(t.toList(), kotlinEncoderFor<T>()).toDF()
 
 /**
  * Utility method to create dataframe from *array or vararg arguments with given column names
  */
 inline fun <reified T> SparkSession.dfOf(colNames: Array<String>, vararg t: T): Dataset<Row> =
-    createDataset(t.asList(), encoder<T>())
+    createDataset(t.toList(), kotlinEncoderFor<T>())
         .run { if (colNames.isEmpty()) toDF() else toDF(*colNames) }
 
 /**
  * Utility method to create dataset from list
  */
 inline fun <reified T> List<T>.toDS(spark: SparkSession): Dataset<T> =
-    spark.createDataset(this, encoder<T>())
+    spark.createDataset(this, kotlinEncoderFor<T>())
 
 /**
  * Utility method to create dataframe from list
@@ -104,13 +104,13 @@ inline fun <reified T> Array<T>.toDF(spark: SparkSession, vararg colNames: Strin
  * Utility method to create dataset from RDD
  */
 inline fun <reified T> RDD<T>.toDS(spark: SparkSession): Dataset<T> =
-    spark.createDataset(this, encoder<T>())
+    spark.createDataset(this, kotlinEncoderFor<T>())
 
 /**
  * Utility method to create dataset from JavaRDD
  */
 inline fun <reified T> JavaRDDLike<T, *>.toDS(spark: SparkSession): Dataset<T> =
-    spark.createDataset(this.rdd(), encoder<T>())
+    spark.createDataset(this.rdd(), kotlinEncoderFor<T>())
 
 /**
  * Utility method to create Dataset<Row> (Dataframe) from JavaRDD.
@@ -132,7 +132,7 @@ inline fun <reified T> RDD<T>.toDF(spark: SparkSession, vararg colNames: String)
  * Returns a new Dataset that contains the result of applying [func] to each element.
  */
 inline fun <reified T, reified R> Dataset<T>.map(noinline func: (T) -> R): Dataset<R> =
-    map(MapFunction(func), encoder<R>())
+    map(MapFunction(func), kotlinEncoderFor<R>())
 
 /**
  * (Kotlin-specific)
@@ -140,7 +140,7 @@ inline fun <reified T, reified R> Dataset<T>.map(noinline func: (T) -> R): Datas
  * and then flattening the results.
  */
 inline fun <T, reified R> Dataset<T>.flatMap(noinline func: (T) -> Iterator<R>): Dataset<R> =
-    flatMap(func, encoder<R>())
+    flatMap(func, kotlinEncoderFor<R>())
 
 /**
  * (Kotlin-specific)
@@ -148,21 +148,21 @@ inline fun <T, reified R> Dataset<T>.flatMap(noinline func: (T) -> Iterator<R>):
  * `listOf(listOf(1, 2, 3), listOf(4, 5, 6))` will be flattened to a Dataset of `listOf(1, 2, 3, 4, 5, 6)`.
  */
 inline fun <reified T, I : Iterable<T>> Dataset<I>.flatten(): Dataset<T> =
-    flatMap(FlatMapFunction { it.iterator() }, encoder<T>())
+    flatMap(FlatMapFunction { it.iterator() }, kotlinEncoderFor<T>())
 
 /**
  * (Kotlin-specific)
  * Returns a [KeyValueGroupedDataset] where the data is grouped by the given key [func].
  */
 inline fun <T, reified R> Dataset<T>.groupByKey(noinline func: (T) -> R): KeyValueGroupedDataset<R, T> =
-    groupByKey(MapFunction(func), encoder<R>())
+    groupByKey(MapFunction(func), kotlinEncoderFor<R>())
 
 /**
  * (Kotlin-specific)
  * Returns a new Dataset that contains the result of applying [func] to each partition.
  */
 inline fun <T, reified R> Dataset<T>.mapPartitions(noinline func: (Iterator<T>) -> Iterator<R>): Dataset<R> =
-    mapPartitions(func, encoder<R>())
+    mapPartitions(MapPartitionsFunction(func), kotlinEncoderFor<R>())
 
 /**
  * (Kotlin-specific)
@@ -193,15 +193,6 @@ inline fun <reified T1, T2> Dataset<Tuple2<T1, T2>>.takeKeys(): Dataset<T1> = ma
  */
 inline fun <reified T1, T2> Dataset<Pair<T1, T2>>.takeKeys(): Dataset<T1> = map { it.first }
 
-/**
- * (Kotlin-specific)
- * Maps the Dataset to only retain the "keys" or [Arity2._1] values.
- */
-@Suppress("DEPRECATION")
-@JvmName("takeKeysArity2")
-@Deprecated("Use Scala tuples instead.", ReplaceWith(""))
-inline fun <reified T1, T2> Dataset<Arity2<T1, T2>>.takeKeys(): Dataset<T1> = map { it._1 }
-
 /**
  * (Kotlin-specific)
  * Maps the Dataset to only retain the "values" or [Tuple2._2] values.
@@ -215,22 +206,13 @@ inline fun <T1, reified T2> Dataset<Tuple2<T1, T2>>.takeValues(): Dataset<T2> =
  */
 inline fun <T1, reified T2> Dataset<Pair<T1, T2>>.takeValues(): Dataset<T2> = map { it.second }
 
-/**
- * (Kotlin-specific)
- * Maps the Dataset to only retain the "values" or [Arity2._2] values.
- */
-@Suppress("DEPRECATION")
-@JvmName("takeValuesArity2")
-@Deprecated("Use Scala tuples instead.", ReplaceWith(""))
-inline fun <T1, reified T2> Dataset<Arity2<T1, T2>>.takeValues(): Dataset<T2> = map { it._2 }
-
 /** DEPRECATED: Use [as] or [to] for this. */
 @Deprecated(
     message = "Deprecated, since we already have `as`() and to().",
     replaceWith = ReplaceWith("this.to<R>()"),
     level = DeprecationLevel.ERROR,
 )
-inline fun <T, reified R> Dataset<T>.downcast(): Dataset<R> = `as`(encoder<R>())
+inline fun <T, reified R> Dataset<T>.downcast(): Dataset<R> = `as`(kotlinEncoderFor<R>())
 
 /**
  * (Kotlin-specific)
@@ -252,7 +234,7 @@ inline fun <T, reified R> Dataset<T>.downcast(): Dataset<R> = `as`(encoder<R>())
  *
  * @see to as alias for [as]
  */
-inline fun <reified R> Dataset<*>.`as`(): Dataset<R> = `as`(encoder<R>())
+inline fun <reified R> Dataset<*>.`as`(): Dataset<R> = `as`(kotlinEncoderFor<R>())
 
 /**
  * (Kotlin-specific)
@@ -274,7 +256,7 @@ inline fun <reified R> Dataset<*>.`as`(): Dataset<R> = `as`(encoder<R>())
  *
  * @see as as alias for [to]
  */
-inline fun <reified R> Dataset<*>.to(): Dataset<R> = `as`(encoder<R>())
+inline fun <reified R> Dataset<*>.to(): Dataset<R> = `as`(kotlinEncoderFor<R>())
 
 /**
  * (Kotlin-specific)
@@ -292,12 +274,16 @@ inline fun <reified T> Dataset<T>.forEachPartition(noinline func: (Iterator<T>)
 /**
  * It's hard to call `Dataset.debugCodegen` from kotlin, so here is utility for that
  */
-fun <T> Dataset<T>.debugCodegen(): Dataset<T> = also { KSparkExtensions.debugCodegen(it) }
+fun <T> Dataset<T>.debugCodegen(): Dataset<T> = also {
+    org.apache.spark.sql.execution.debug.`package$`.`MODULE$`.DebugQuery(it).debugCodegen()
+}
 
 /**
  * It's hard to call `Dataset.debug` from kotlin, so here is utility for that
  */
-fun <T> Dataset<T>.debug(): Dataset<T> = also { KSparkExtensions.debug(it) }
+fun <T> Dataset<T>.debug(): Dataset<T> = also {
+    org.apache.spark.sql.execution.debug.`package$`.`MODULE$`.DebugQuery(it).debug()
+}
 
 
 /**
@@ -370,25 +356,13 @@ fun <T1, T2> Dataset<Tuple2<T1, T2>>.sortByKey(): Dataset<Tuple2<T1, T2>> = sort
 @JvmName("sortByTuple2Value")
 fun <T1, T2> Dataset<Tuple2<T1, T2>>.sortByValue(): Dataset<Tuple2<T1, T2>> = sort("_2")
 
-/** Returns a dataset sorted by the first (`_1`) value of each [Arity2] inside. */
-@Suppress("DEPRECATION")
-@Deprecated("Use Scala tuples instead.", ReplaceWith(""))
-@JvmName("sortByArity2Key")
-fun <T1, T2> Dataset<Arity2<T1, T2>>.sortByKey(): Dataset<Arity2<T1, T2>> = sort("_1")
-
-/** Returns a dataset sorted by the second (`_2`) value of each [Arity2] inside. */
-@Suppress("DEPRECATION")
-@Deprecated("Use Scala tuples instead.", ReplaceWith(""))
-@JvmName("sortByArity2Value")
-fun <T1, T2> Dataset<Arity2<T1, T2>>.sortByValue(): Dataset<Arity2<T1, T2>> = sort("_2")
-
 /** Returns a dataset sorted by the first (`first`) value of each [Pair] inside. */
 @JvmName("sortByPairKey")
-fun <T1, T2> Dataset<Pair<T1, T2>>.sortByKey(): Dataset<Pair<T1, T2>> = sort("first")
+fun <T1, T2> Dataset<Pair<T1, T2>>.sortByKey(): Dataset<Pair<T1, T2>> = sort("getFirst")
 
 /** Returns a dataset sorted by the second (`second`) value of each [Pair] inside. */
 @JvmName("sortByPairValue")
-fun <T1, T2> Dataset<Pair<T1, T2>>.sortByValue(): Dataset<Pair<T1, T2>> = sort("second")
+fun <T1, T2> Dataset<Pair<T1, T2>>.sortByValue(): Dataset<Pair<T1, T2>> = sort("getSecond")
 
 /**
  * This function creates block, where one can call any further computations on already cached dataset
diff --git a/kotlin-spark-api/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Encoding.kt b/kotlin-spark-api/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Encoding.kt
index ecf62b19..0306b8f6 100644
--- a/kotlin-spark-api/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Encoding.kt
+++ b/kotlin-spark-api/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Encoding.kt
@@ -29,100 +29,68 @@
 
 package org.jetbrains.kotlinx.spark.api
 
-import org.apache.spark.sql.*
-import org.apache.spark.sql.Encoders.*
-import org.apache.spark.sql.KotlinReflection.*
-import org.apache.spark.sql.catalyst.encoders.ExpressionEncoder
-import org.apache.spark.sql.types.*
+import org.apache.commons.lang3.reflect.TypeUtils.*
+import org.apache.spark.sql.Encoder
+import org.apache.spark.sql.Row
+import org.apache.spark.sql.catalyst.DefinedByConstructorParams
+import org.apache.spark.sql.catalyst.encoders.AgnosticEncoder
+import org.apache.spark.sql.catalyst.encoders.AgnosticEncoders
+import org.apache.spark.sql.catalyst.encoders.AgnosticEncoders.EncoderField
+import org.apache.spark.sql.catalyst.encoders.AgnosticEncoders.JavaBeanEncoder
+import org.apache.spark.sql.catalyst.encoders.AgnosticEncoders.ProductEncoder
+import org.apache.spark.sql.catalyst.encoders.OuterScopes
+import org.apache.spark.sql.types.DataType
+import org.apache.spark.sql.types.Decimal
+import org.apache.spark.sql.types.Metadata
+import org.apache.spark.sql.types.SQLUserDefinedType
+import org.apache.spark.sql.types.StructType
+import org.apache.spark.sql.types.UDTRegistration
+import org.apache.spark.sql.types.UserDefinedType
 import org.apache.spark.unsafe.types.CalendarInterval
-import scala.Product
+import org.jetbrains.kotlinx.spark.api.plugin.annotations.ColumnName
+import org.jetbrains.kotlinx.spark.api.plugin.annotations.Sparkify
+import org.jetbrains.kotlinx.spark.api.udts.DatePeriodUdt
+import org.jetbrains.kotlinx.spark.api.udts.DateTimePeriodUdt
+import org.jetbrains.kotlinx.spark.api.udts.InstantUdt
+import org.jetbrains.kotlinx.spark.api.udts.LocalDateTimeUdt
+import org.jetbrains.kotlinx.spark.api.udts.LocalDateUdt
 import scala.reflect.ClassTag
-import java.beans.PropertyDescriptor
-import java.math.BigDecimal
-import java.math.BigInteger
-import java.sql.Date
-import java.sql.Timestamp
-import java.time.*
+import java.io.Serializable
 import java.util.*
-import java.util.concurrent.ConcurrentHashMap
-import kotlin.Any
-import kotlin.Array
-import kotlin.Boolean
-import kotlin.BooleanArray
-import kotlin.Byte
-import kotlin.ByteArray
-import kotlin.Double
-import kotlin.DoubleArray
-import kotlin.ExperimentalStdlibApi
-import kotlin.Float
-import kotlin.FloatArray
-import kotlin.IllegalArgumentException
-import kotlin.Int
-import kotlin.IntArray
-import kotlin.Long
-import kotlin.LongArray
-import kotlin.OptIn
-import kotlin.Short
-import kotlin.ShortArray
-import kotlin.String
-import kotlin.Suppress
-import kotlin.reflect.*
-import kotlin.reflect.full.findAnnotation
+import javax.annotation.Nonnull
+import kotlin.reflect.KClass
+import kotlin.reflect.KMutableProperty
+import kotlin.reflect.KProperty1
+import kotlin.reflect.KType
+import kotlin.reflect.KTypeProjection
+import kotlin.reflect.full.createType
+import kotlin.reflect.full.declaredMemberFunctions
+import kotlin.reflect.full.declaredMemberProperties
 import kotlin.reflect.full.hasAnnotation
 import kotlin.reflect.full.isSubclassOf
+import kotlin.reflect.full.isSubtypeOf
 import kotlin.reflect.full.primaryConstructor
+import kotlin.reflect.full.staticFunctions
+import kotlin.reflect.full.withNullability
+import kotlin.reflect.jvm.javaGetter
+import kotlin.reflect.jvm.javaMethod
 import kotlin.reflect.jvm.jvmName
-import kotlin.to
-
-@JvmField
-val ENCODERS: Map<KClass<*>, Encoder<*>> = mapOf(
-    Boolean::class to BOOLEAN(),
-    Byte::class to BYTE(),
-    Short::class to SHORT(),
-    Int::class to INT(),
-    Long::class to LONG(),
-    Float::class to FLOAT(),
-    Double::class to DOUBLE(),
-    String::class to STRING(),
-    BigDecimal::class to DECIMAL(),
-    Date::class to DATE(),
-    LocalDate::class to LOCALDATE(),
-    Timestamp::class to TIMESTAMP(),
-    Instant::class to INSTANT(),
-    ByteArray::class to BINARY(),
-    //#if sparkMinor >= 3.2
-    Duration::class to DURATION(),
-    Period::class to PERIOD(),
-    //#endif
-)
-
-private fun checkIfEncoderRequiresNewerVersion(kClass: KClass<*>) {
-    when (kClass) {
-        //#if sparkMinor < 3.2
-        //$Duration::class, Period::class -> throw IllegalArgumentException("$kClass is supported in Spark 3.2+")
-        //#endif
-    }
-}
-
-private val knownDataTypes: Map<KClass<out Any>, DataType> = mapOf(
-    Byte::class to DataTypes.ByteType,
-    Short::class to DataTypes.ShortType,
-    Int::class to DataTypes.IntegerType,
-    Long::class to DataTypes.LongType,
-    Boolean::class to DataTypes.BooleanType,
-    Float::class to DataTypes.FloatType,
-    Double::class to DataTypes.DoubleType,
-    String::class to DataTypes.StringType,
-    LocalDate::class to DataTypes.DateType,
-    Date::class to DataTypes.DateType,
-    Timestamp::class to DataTypes.TimestampType,
-    Instant::class to DataTypes.TimestampType,
-    ByteArray::class to DataTypes.BinaryType,
-    Decimal::class to DecimalType.SYSTEM_DEFAULT(),
-    BigDecimal::class to DecimalType.SYSTEM_DEFAULT(),
-    BigInteger::class to DecimalType.SYSTEM_DEFAULT(),
-    CalendarInterval::class to DataTypes.CalendarIntervalType,
-)
+import kotlin.reflect.typeOf
+
+fun <T : Any> kotlinEncoderFor(
+    kClass: KClass<T>,
+    arguments: List<KTypeProjection> = emptyList(),
+    nullable: Boolean = false,
+    annotations: List<Annotation> = emptyList()
+): Encoder<T> =
+    applyEncoder(
+        KotlinTypeInference.encoderFor(
+            kClass = kClass,
+            arguments = arguments,
+            nullable = nullable,
+            annotations = annotations,
+        )
+    )
 
 /**
  * Main method of API, which gives you seamless integration with Spark:
@@ -133,253 +101,643 @@ private val knownDataTypes: Map<KClass<out Any>, DataType> = mapOf(
  * @param T type, supported by Spark
  * @return generated encoder
  */
-@OptIn(ExperimentalStdlibApi::class)
-inline fun <reified T> encoder(): Encoder<T> = generateEncoder(typeOf<T>(), T::class)
+inline fun <reified T> kotlinEncoderFor(): Encoder<T> =
+    kotlinEncoderFor(
+        typeOf<T>()
+    )
+
+fun <T> kotlinEncoderFor(kType: KType): Encoder<T> =
+    applyEncoder(
+        KotlinTypeInference.encoderFor(kType)
+    )
 
 /**
- * @see encoder
+ * For spark-connect, no ExpressionEncoder is needed, so we can just return the AgnosticEncoder.
  */
-@Suppress("UNCHECKED_CAST")
-fun <T> generateEncoder(type: KType, cls: KClass<*>): Encoder<T> {
-    checkIfEncoderRequiresNewerVersion(cls)
-    return when {
-        isSupportedByKotlinClassEncoder(cls) -> kotlinClassEncoder(schema = memoizedSchema(type), kClass = cls)
-        else -> ENCODERS[cls] as? Encoder<T>? ?: bean(cls.java)
-    } as Encoder<T>
+private fun <T> applyEncoder(agnosticEncoder: AgnosticEncoder<T>): Encoder<T> {
+    //#if sparkConnect == false
+    return org.apache.spark.sql.catalyst.encoders.ExpressionEncoder.apply(agnosticEncoder)
+    //#else
+    //$return agnosticEncoder
+    //#endif
 }
 
-private fun isSupportedByKotlinClassEncoder(cls: KClass<*>): Boolean =
-    when {
-        cls == ByteArray::class -> false // uses binary encoder
-        cls.isData -> true
-        cls.isSubclassOf(Map::class) -> true
-        cls.isSubclassOf(Iterable::class) -> true
-        cls.isSubclassOf(Product::class) -> true
-        cls.java.isArray -> true
-        cls.hasAnnotation<SQLUserDefinedType>() -> true
-        UDTRegistration.exists(cls.jvmName) -> true
-        else -> false
-    }
 
+@Deprecated("Use kotlinEncoderFor instead", ReplaceWith("kotlinEncoderFor<T>()"))
+inline fun <reified T> encoder(): Encoder<T> = kotlinEncoderFor(typeOf<T>())
 
-private fun <T> kotlinClassEncoder(schema: DataType, kClass: KClass<*>): Encoder<T> {
-    val serializer =
-        if (schema is DataTypeWithClass) serializerFor(kClass.java, schema)
-        else serializerForType(getType(kClass.java))
+internal fun StructType.unwrap(): DataType =
+    if (fields().singleOrNull()?.name() == "value") fields().single().dataType()
+    else this
 
-    val deserializer =
-        if (schema is DataTypeWithClass) deserializerFor(kClass.java, schema)
-        else deserializerForType(getType(kClass.java))
+inline fun <reified T> schemaFor(): DataType = schemaFor(typeOf<T>())
 
-    return ExpressionEncoder(serializer, deserializer, ClassTag.apply(kClass.java))
-}
+fun schemaFor(kType: KType): DataType = kotlinEncoderFor<Any?>(kType).schema().unwrap()
 
-/**
- * Not meant to be used by the user explicitly.
- *
- * This function generates the DataType schema for supported classes, including Kotlin data classes, [Map],
- * [Iterable], [Product], [Array], and combinations of those.
- *
- * It's mainly used by [generateEncoder]/[encoder].
- */
-@OptIn(ExperimentalStdlibApi::class)
-fun schema(type: KType, map: Map<String, KType> = mapOf()): DataType {
-    val primitiveSchema = knownDataTypes[type.classifier]
-    if (primitiveSchema != null)
-        return KSimpleTypeWrapper(
-            /* dt = */ primitiveSchema,
-            /* cls = */ (type.classifier!! as KClass<*>).java,
-            /* nullable = */ type.isMarkedNullable
-        )
+@Deprecated("Use schemaFor instead", ReplaceWith("schemaFor(kType)"))
+fun schema(kType: KType) = schemaFor(kType)
 
-    val klass = type.classifier as? KClass<*> ?: throw IllegalArgumentException("Unsupported type $type")
-    val args = type.arguments
+object KotlinTypeInference : Serializable {
 
-    val types = transitiveMerge(
-        map,
-        klass.typeParameters.zip(args).associate {
-            it.first.name to it.second.type!!
-        },
+    // https://blog.stylingandroid.com/kotlin-serializable-objects/
+    private fun readResolve(): Any = KotlinTypeInference
+
+    /**
+     * @param kClass the class for which to infer the encoder.
+     * @param arguments the generic type arguments for the class.
+     * @param nullable whether the class is nullable.
+     * @param annotations the annotations for the class.
+     * @return an [AgnosticEncoder] for the given class arguments.
+     */
+    fun <T : Any> encoderFor(
+        kClass: KClass<T>,
+        arguments: List<KTypeProjection> = emptyList(),
+        nullable: Boolean = false,
+        annotations: List<Annotation> = emptyList()
+    ): AgnosticEncoder<T> = encoderFor(
+        kType = kClass.createType(
+            arguments = arguments,
+            nullable = nullable,
+            annotations = annotations,
+        )
     )
 
-    return when {
-        klass.isSubclassOf(Enum::class) ->
-            KSimpleTypeWrapper(
-                /* dt = */ DataTypes.StringType,
-                /* cls = */ klass.java,
-                /* nullable = */ type.isMarkedNullable
-            )
+    /**
+     * @return an [AgnosticEncoder] for the given type [T].
+     */
+    @JvmName("inlineEncoderFor")
+    inline fun <reified T> encoderFor(): AgnosticEncoder<T> =
+        encoderFor(kType = typeOf<T>())
+
+    /**
+     * Main entry function for the inference of encoders.
+     *
+     * @return an [AgnosticEncoder] for the given [kType].
+     */
+    @Suppress("UNCHECKED_CAST")
+    fun <T> encoderFor(kType: KType): AgnosticEncoder<T> {
+        registerUdts()
+        return encoderFor(
+            currentType = kType,
+            seenTypeSet = emptySet(),
+            typeVariables = emptyMap(),
+        ) as AgnosticEncoder<T>
+    }
+
+
+    private inline fun <reified T> KType.isSubtypeOf(): Boolean = isSubtypeOf(typeOf<T>())
+
+    private val KType.simpleName
+        get() = toString().removeSuffix("?").removeSuffix("!")
+
+    private fun KType.isDefinedByScalaConstructorParams(): Boolean = when {
+        isSubtypeOf<scala.Option<*>?>() -> arguments.first().type!!.isDefinedByScalaConstructorParams()
+        else -> isSubtypeOf<scala.Product?>() || isSubtypeOf<DefinedByConstructorParams?>()
+    }
 
-        klass.isSubclassOf(Iterable::class) || klass.java.isArray -> {
-            val listParam = if (klass.java.isArray) {
-                when (klass) {
-                    IntArray::class -> typeOf<Int>()
-                    LongArray::class -> typeOf<Long>()
-                    FloatArray::class -> typeOf<Float>()
-                    DoubleArray::class -> typeOf<Double>()
-                    BooleanArray::class -> typeOf<Boolean>()
-                    ShortArray::class -> typeOf<Short>()
-                    /* ByteArray handled by BinaryType */
-                    else -> types.getValue(klass.typeParameters[0].name)
+    private fun KType.getScalaConstructorParameters(
+        genericTypeMap: Map<String, KType>,
+        kClass: KClass<*> = classifier as KClass<*>,
+    ): List<Pair<String, KType>> {
+        val constructor =
+            kClass.primaryConstructor
+                ?: kClass.constructors.firstOrNull()
+                ?: kClass.staticFunctions.firstOrNull {
+                    it.name == "apply" && it.returnType.classifier == kClass
                 }
-            } else types.getValue(klass.typeParameters[0].name)
+                ?: error("couldn't find constructor for $this")
+
+        val kParameters = constructor.parameters
+        val params = kParameters.map { param ->
+            val paramType = if (param.type.isSubtypeOf<scala.AnyVal>()) {
+                // Replace value class with underlying type
+                param.type.getScalaConstructorParameters(genericTypeMap).first().second
+            } else {
+                // check if the type was a filled-in generic type, otherwise just use the given type
+                genericTypeMap[param.type.simpleName] ?: param.type
+            }
 
-            val dataType = DataTypes.createArrayType(
-                /* elementType = */ schema(listParam, types),
-                /* containsNull = */ listParam.isMarkedNullable
-            )
+            param.name!! to paramType
+        }
 
-            KComplexTypeWrapper(
-                /* dt = */ dataType,
-                /* cls = */ klass.java,
-                /* nullable = */ type.isMarkedNullable
-            )
+        return params
+    }
+
+    /**
+     * Provides helpful warnings for when something goes wrong with encoding a certain data class.
+     */
+    private fun KClass<*>.checkIsSparkified(props: List<KProperty1<*, *>>, propHasColumnNameAnnotation: List<Boolean>) {
+        val isAnnotated = hasAnnotation<Sparkify>()
+
+        val mismatchedNames = buildList {
+            for ((i, prop) in props.withIndex()) {
+                if (isAnnotated && propHasColumnNameAnnotation[i]) continue
+                val name = prop.name
+                val getterMethodName = prop.getter.javaMethod!!.name
+                if (name != getterMethodName)
+                    add(name to getterMethodName)
+            }
         }
 
-        klass == Map::class -> {
-            val mapKeyParam = types.getValue(klass.typeParameters[0].name)
-            val mapValueParam = types.getValue(klass.typeParameters[1].name)
+        val isPair = this == Pair::class
+        val isTriple = this == Triple::class
 
-            val dataType = DataTypes.createMapType(
-                /* keyType = */ schema(mapKeyParam, types),
-                /* valueType = */ schema(mapValueParam, types),
-                /* valueContainsNull = */ true
-            )
+        // can't be checked if injected by Sparkify
+        val isProduct = this.isSubclassOf(scala.Product::class)
+        val isSerializable = this.isSubclassOf(Serializable::class)
 
-            KComplexTypeWrapper(
-                /* dt = */ dataType,
-                /* cls = */ klass.java,
-                /* nullable = */ type.isMarkedNullable
-            )
+        when {
+            // happy path
+            isAnnotated && mismatchedNames.isEmpty() -> return
+
+            // not annotated but still happy as spark will like it
+            !isAnnotated && mismatchedNames.isEmpty() && isProduct && isSerializable -> return
         }
 
-        klass.isData -> {
-
-            val structType = StructType(
-                klass
-                    .primaryConstructor!!
-                    .parameters
-                    .filter { it.findAnnotation<Transient>() == null }
-                    .map {
-                        val projectedType = types[it.type.toString()] ?: it.type
-
-                        val readMethodName = when {
-                            it.name!!.startsWith("is") -> it.name!!
-                            else -> "get${it.name!!.replaceFirstChar { it.uppercase() }}"
-                        }
-
-                        val propertyDescriptor = PropertyDescriptor(
-                            /* propertyName = */ it.name,
-                            /* beanClass = */ klass.java,
-                            /* readMethodName = */ readMethodName,
-                            /* writeMethodName = */ null
-                        )
-
-                        KStructField(
-                            /* getterName = */ propertyDescriptor.readMethod.name,
-                            /* delegate = */ StructField(
-                                /* name = */ it.name,
-                                /* dataType = */ schema(projectedType, types),
-                                /* nullable = */ projectedType.isMarkedNullable,
-                                /* metadata = */ Metadata.empty()
-                            )
-                        )
-                    }
-                    .toTypedArray()
-            )
-            KDataTypeWrapper(structType, klass.java, true)
+        val warningMessage = buildString {
+            appendLine(this@checkIsSparkified.toString() + " does not seem to be ready for Kotlin Spark:")
+            if (isAnnotated) {
+                appendLine("  - It is annotated with @Sparkify, but, the compiler plugin might not be installed or may be misfunctioning.")
+            } else {
+                appendLine("  - It is not annotated with @Sparkify and it does not have the correct structure for Spark:")
+            }
+            if (mismatchedNames.isNotEmpty()) {
+                appendLine("  - The following property names do not match their getter method names:")
+                for ((name, getter) in mismatchedNames) {
+                    appendLine("    - prop name: `$name`, getter name: `$getter`")
+                }
+                appendLine("    Spark uses the getter method names to get the column names.")
+                appendLine("    Properties must be annotated with @get:JvmName(\"<PROP_NAME>\") to generate the right getters. Else, your columns might be be named \"getXYZ\".")
+                appendLine("    @Sparkify can do this for you.")
+                appendLine("    If you agree with the getter/column names above (like if you've added custom @get:JvmName's), you can ignore this warning.")
+            }
+            if (isPair) {
+                appendLine("  - It is a Pair, which is not well supported by Spark. You can use scala.Tuple2 instead.")
+            } else if (isTriple) {
+                appendLine("  - It is a Triple, which is not well supported by Spark. You can use scala.Tuple3 instead.")
+            }
+            if (!isProduct) {
+                appendLine("  - It is not a scala.Product, which is fine for most cases, but can break compatibility with UDFs. You can let your data class implement scala.Product to fix this or let @Sparkify handle it for you.")
+            }
+            if (!isSerializable) {
+                appendLine("  - It is not Serializable, which is fine for most cases, but can break compatibility. You can let your data class implement java.io.Serializable to fix this or let @Sparkify handle it for you.")
+            }
+        }
+
+        println(warningMessage)
+    }
+
+    /**
+     * Can merge two maps transitively.
+     * This means that given
+     * ```
+     * a: { A -> B, D -> E }
+     * b: { B -> C, G -> F }
+     * ```
+     * it will return
+     * ```
+     * { A -> C, D -> E, G -> F }
+     * ```
+     * @param valueToKey a function that returns (an optional) key for a given value
+     */
+    private fun <K, V> transitiveMerge(a: Map<K, V>, b: Map<K, V>, valueToKey: (V) -> K?): Map<K, V> =
+        a + b.mapValues { a.getOrDefault(valueToKey(it.value), it.value) }
+
+    private fun registerUdts() {
+        val udts = listOf(
+            kotlinx.datetime.LocalDate::class to LocalDateUdt::class,
+            kotlinx.datetime.Instant::class to InstantUdt::class,
+            kotlinx.datetime.LocalDateTime::class to LocalDateTimeUdt::class,
+            kotlinx.datetime.DatePeriod::class to DatePeriodUdt::class,
+            kotlinx.datetime.DateTimePeriod::class to DateTimePeriodUdt::class,
+        )
+
+        for ((kClass, udtClass) in udts) {
+            if (!UDTRegistration.exists(kClass.java.name)) {
+                UDTRegistration.register(kClass.java.name, udtClass.java.name)
+            }
         }
-        klass.isSubclassOf(Product::class) -> {
+        // TODO
+        //  UDTRegistration.register(kotlin.time.Duration::class.java.name, DurationUdt::class.java.name)
+    }
+
+    /**
+     *
+     */
+    private fun encoderFor(
+        currentType: KType,
+        seenTypeSet: Set<KType>,
+
+        // how the generic types of the data class (like T, S) are filled in for this instance of the class
+        typeVariables: Map<String, KType>,
+    ): AgnosticEncoder<*> {
+        val kClass =
+            currentType.classifier as? KClass<*> ?: throw IllegalArgumentException("Unsupported type $currentType")
+        val jClass = kClass.java
+
+        // given t == typeOf<Pair<Int, Pair<String, Any>>>(), these are [Int, Pair<String, Any>]
+        val tArguments = currentType.arguments
+
+        // the type arguments of the class, like T, S
+        val expectedTypeParameters = kClass.typeParameters.map { it }
+
+        @Suppress("NAME_SHADOWING")
+        val typeVariables = transitiveMerge(
+            a = typeVariables,
+            b = (expectedTypeParameters zip tArguments).toMap()
+                .mapValues { (expectedType, givenType) ->
+                    if (givenType.type != null) return@mapValues givenType.type!! // fill in the type as is
+
+                    // when givenType is *, use the upperbound
+                    expectedType.upperBounds.first()
+                }.mapKeys { it.key.name }
+        ) { it.simpleName }
+
+        return when {
+            // primitives java / kotlin
+            currentType == typeOf<Boolean>() -> AgnosticEncoders.`PrimitiveBooleanEncoder$`.`MODULE$`
+            currentType == typeOf<Byte>() -> AgnosticEncoders.`PrimitiveByteEncoder$`.`MODULE$`
+            currentType == typeOf<Short>() -> AgnosticEncoders.`PrimitiveShortEncoder$`.`MODULE$`
+            currentType == typeOf<Int>() -> AgnosticEncoders.`PrimitiveIntEncoder$`.`MODULE$`
+            currentType == typeOf<Long>() -> AgnosticEncoders.`PrimitiveLongEncoder$`.`MODULE$`
+            currentType == typeOf<Float>() -> AgnosticEncoders.`PrimitiveFloatEncoder$`.`MODULE$`
+            currentType == typeOf<Double>() -> AgnosticEncoders.`PrimitiveDoubleEncoder$`.`MODULE$`
+
+            // primitives scala
+            currentType == typeOf<scala.Boolean>() -> AgnosticEncoders.`PrimitiveBooleanEncoder$`.`MODULE$`
+            currentType == typeOf<scala.Byte>() -> AgnosticEncoders.`PrimitiveByteEncoder$`.`MODULE$`
+            currentType == typeOf<scala.Short>() -> AgnosticEncoders.`PrimitiveShortEncoder$`.`MODULE$`
+            currentType == typeOf<scala.Int>() -> AgnosticEncoders.`PrimitiveIntEncoder$`.`MODULE$`
+            currentType == typeOf<scala.Long>() -> AgnosticEncoders.`PrimitiveLongEncoder$`.`MODULE$`
+            currentType == typeOf<scala.Float>() -> AgnosticEncoders.`PrimitiveFloatEncoder$`.`MODULE$`
+            currentType == typeOf<scala.Double>() -> AgnosticEncoders.`PrimitiveDoubleEncoder$`.`MODULE$`
+
+            // boxed primitives java / kotlin
+            currentType.isSubtypeOf<Boolean?>() -> AgnosticEncoders.`BoxedBooleanEncoder$`.`MODULE$`
+            currentType.isSubtypeOf<Byte?>() -> AgnosticEncoders.`BoxedByteEncoder$`.`MODULE$`
+            currentType.isSubtypeOf<Short?>() -> AgnosticEncoders.`BoxedShortEncoder$`.`MODULE$`
+            currentType.isSubtypeOf<Int?>() -> AgnosticEncoders.`BoxedIntEncoder$`.`MODULE$`
+            currentType.isSubtypeOf<Long?>() -> AgnosticEncoders.`BoxedLongEncoder$`.`MODULE$`
+            currentType.isSubtypeOf<Float?>() -> AgnosticEncoders.`BoxedFloatEncoder$`.`MODULE$`
+            currentType.isSubtypeOf<Double?>() -> AgnosticEncoders.`BoxedDoubleEncoder$`.`MODULE$`
+
+            // boxed primitives scala
+            currentType.isSubtypeOf<scala.Boolean?>() -> AgnosticEncoders.`BoxedBooleanEncoder$`.`MODULE$`
+            currentType.isSubtypeOf<scala.Byte?>() -> AgnosticEncoders.`BoxedByteEncoder$`.`MODULE$`
+            currentType.isSubtypeOf<scala.Short?>() -> AgnosticEncoders.`BoxedShortEncoder$`.`MODULE$`
+            currentType.isSubtypeOf<scala.Int?>() -> AgnosticEncoders.`BoxedIntEncoder$`.`MODULE$`
+            currentType.isSubtypeOf<scala.Long?>() -> AgnosticEncoders.`BoxedLongEncoder$`.`MODULE$`
+            currentType.isSubtypeOf<scala.Float?>() -> AgnosticEncoders.`BoxedFloatEncoder$`.`MODULE$`
+            currentType.isSubtypeOf<scala.Double?>() -> AgnosticEncoders.`BoxedDoubleEncoder$`.`MODULE$`
+
+            // leaf encoders
+            currentType.isSubtypeOf<String?>() -> AgnosticEncoders.`StringEncoder$`.`MODULE$`
+            currentType.isSubtypeOf<Decimal?>() -> AgnosticEncoders.DEFAULT_SPARK_DECIMAL_ENCODER()
+            currentType.isSubtypeOf<scala.math.BigDecimal?>() -> AgnosticEncoders.DEFAULT_SCALA_DECIMAL_ENCODER()
+            currentType.isSubtypeOf<scala.math.BigInt?>() -> AgnosticEncoders.`ScalaBigIntEncoder$`.`MODULE$`
+            currentType.isSubtypeOf<ByteArray?>() -> AgnosticEncoders.`BinaryEncoder$`.`MODULE$`
+            currentType.isSubtypeOf<java.math.BigDecimal?>() -> AgnosticEncoders.DEFAULT_JAVA_DECIMAL_ENCODER()
+            currentType.isSubtypeOf<java.math.BigInteger?>() -> AgnosticEncoders.`JavaBigIntEncoder$`.`MODULE$`
+            currentType.isSubtypeOf<CalendarInterval?>() -> AgnosticEncoders.`CalendarIntervalEncoder$`.`MODULE$`
+            currentType.isSubtypeOf<java.time.LocalDate?>() -> AgnosticEncoders.STRICT_LOCAL_DATE_ENCODER()
+            currentType.isSubtypeOf<java.sql.Date?>() -> AgnosticEncoders.STRICT_DATE_ENCODER()
+            currentType.isSubtypeOf<java.time.Instant?>() -> AgnosticEncoders.STRICT_INSTANT_ENCODER()
+            currentType.isSubtypeOf<java.sql.Timestamp?>() -> AgnosticEncoders.STRICT_TIMESTAMP_ENCODER()
+            currentType.isSubtypeOf<java.time.LocalDateTime?>() -> AgnosticEncoders.`LocalDateTimeEncoder$`.`MODULE$`
+            currentType.isSubtypeOf<java.time.Duration?>() -> AgnosticEncoders.`DayTimeIntervalEncoder$`.`MODULE$`
+            currentType.isSubtypeOf<java.time.Period?>() -> AgnosticEncoders.`YearMonthIntervalEncoder$`.`MODULE$`
+            currentType.isSubtypeOf<Row?>() -> AgnosticEncoders.`UnboundRowEncoder$`.`MODULE$`
+
+            // enums
+            kClass.isSubclassOf(Enum::class) -> AgnosticEncoders.JavaEnumEncoder(ClassTag.apply<Any?>(jClass))
+
+            // TODO test
+            kClass.isSubclassOf(scala.Enumeration.Value::class) ->
+                AgnosticEncoders.ScalaEnumEncoder(jClass.superclass, ClassTag.apply<Any?>(jClass))
+
+            // udts
+            kClass.hasAnnotation<SQLUserDefinedType>() -> {
+                val annotation = jClass.getAnnotation(SQLUserDefinedType::class.java)!!
+                val udtClass = annotation.udt
+                val udt = udtClass.primaryConstructor!!.call()
+                AgnosticEncoders.UDTEncoder(udt, udtClass.java)
+            }
 
-            // create map from T1, T2 to Int, String etc.
-            val typeMap = klass.constructors.first().typeParameters.map { it.name }
-                .zip(
-                    type.arguments.map { it.type }
+            UDTRegistration.exists(kClass.jvmName) -> {
+                val udt = UDTRegistration.getUDTFor(kClass.jvmName)!!
+                    .get()!!
+                    .getConstructor()
+                    .newInstance() as UserDefinedType<*>
+
+                AgnosticEncoders.UDTEncoder(udt, udt.javaClass)
+            }
+
+            currentType.isSubtypeOf<kotlin.time.Duration?>() -> TODO("kotlin.time.Duration is unsupported. Use java.time.Duration for now.")
+
+            currentType.isSubtypeOf<scala.Option<*>?>() -> {
+                val elementEncoder = encoderFor(
+                    currentType = tArguments.first().type!!,
+                    seenTypeSet = seenTypeSet,
+                    typeVariables = typeVariables,
                 )
-                .toMap()
+                AgnosticEncoders.OptionEncoder(elementEncoder)
+            }
 
-            // collect params by name and actual type
-            val params = klass.constructors.first().parameters.map {
-                val typeName = it.type.toString().replace("!", "")
-                it.name to (typeMap[typeName] ?: it.type)
+            // primitive arrays
+            currentType.isSubtypeOf<IntArray?>() -> {
+                val elementEncoder = encoderFor(
+                    currentType = typeOf<Int>(),
+                    seenTypeSet = seenTypeSet,
+                    typeVariables = typeVariables,
+                )
+                AgnosticEncoders.ArrayEncoder(elementEncoder, false)
+            }
+
+            currentType.isSubtypeOf<DoubleArray?>() -> {
+                val elementEncoder = encoderFor(
+                    currentType = typeOf<Double>(),
+                    seenTypeSet = seenTypeSet,
+                    typeVariables = typeVariables,
+                )
+                AgnosticEncoders.ArrayEncoder(elementEncoder, false)
             }
 
-            val structType = DataTypes.createStructType(
-                params.map { (fieldName, fieldType) ->
-                    val dataType = schema(fieldType, types)
-
-                    KStructField(
-                        /* getterName = */ fieldName,
-                        /* delegate = */ StructField(
-                            /* name = */ fieldName,
-                            /* dataType = */ dataType,
-                            /* nullable = */ fieldType.isMarkedNullable,
-                            /* metadata = */Metadata.empty()
-                        )
+            currentType.isSubtypeOf<FloatArray?>() -> {
+                val elementEncoder = encoderFor(
+                    currentType = typeOf<Float>(),
+                    seenTypeSet = seenTypeSet,
+                    typeVariables = typeVariables,
+                )
+                AgnosticEncoders.ArrayEncoder(elementEncoder, false)
+            }
+
+            currentType.isSubtypeOf<ShortArray?>() -> {
+                val elementEncoder = encoderFor(
+                    currentType = typeOf<Short>(),
+                    seenTypeSet = seenTypeSet,
+                    typeVariables = typeVariables,
+                )
+                AgnosticEncoders.ArrayEncoder(elementEncoder, false)
+            }
+
+            currentType.isSubtypeOf<LongArray?>() -> {
+                val elementEncoder = encoderFor(
+                    currentType = typeOf<Long>(),
+                    seenTypeSet = seenTypeSet,
+                    typeVariables = typeVariables,
+                )
+                AgnosticEncoders.ArrayEncoder(elementEncoder, false)
+            }
+
+            currentType.isSubtypeOf<BooleanArray?>() -> {
+                val elementEncoder = encoderFor(
+                    currentType = typeOf<Boolean>(),
+                    seenTypeSet = seenTypeSet,
+                    typeVariables = typeVariables,
+                )
+                AgnosticEncoders.ArrayEncoder(elementEncoder, false)
+            }
+
+            // boxed arrays
+            jClass.isArray -> {
+                val type = currentType.arguments.first().type!!
+                val elementEncoder = encoderFor(
+                    currentType = type.withNullability(true), // so we get a boxed array
+                    seenTypeSet = seenTypeSet,
+                    typeVariables = typeVariables,
+                )
+                AgnosticEncoders.ArrayEncoder(elementEncoder, true)
+            }
+
+            currentType.isSubtypeOf<List<*>?>() -> {
+                val subType = tArguments.first().type!!
+                val elementEncoder = encoderFor(
+                    currentType = subType,
+                    seenTypeSet = seenTypeSet,
+                    typeVariables = typeVariables,
+                )
+                AgnosticEncoders.IterableEncoder<List<*>, _>(
+                    /* clsTag = */ ClassTag.apply(jClass),
+                    /* element = */ elementEncoder,
+                    /* containsNull = */ subType.isMarkedNullable,
+                    /* lenientSerialization = */ false,
+                )
+            }
+
+            currentType.isSubtypeOf<scala.collection.Seq<*>?>() -> {
+                val subType = tArguments.first().type!!
+                val elementEncoder = encoderFor(
+                    currentType = subType,
+                    seenTypeSet = seenTypeSet,
+                    typeVariables = typeVariables,
+                )
+                AgnosticEncoders.IterableEncoder<scala.collection.Seq<*>, _>(
+                    /* clsTag = */ ClassTag.apply(jClass),
+                    /* element = */ elementEncoder,
+                    /* containsNull = */ subType.isMarkedNullable,
+                    /* lenientSerialization = */ false,
+                )
+            }
+
+            currentType.isSubtypeOf<Set<*>?>() -> {
+                val subType = tArguments.first().type!!
+                val elementEncoder = encoderFor(
+                    currentType = subType,
+                    seenTypeSet = seenTypeSet,
+                    typeVariables = typeVariables,
+                )
+                AgnosticEncoders.IterableEncoder<Set<*>, _>(
+                    /* clsTag = */ ClassTag.apply(jClass),
+                    /* element = */ elementEncoder,
+                    /* containsNull = */ subType.isMarkedNullable,
+                    /* lenientSerialization = */ false,
+                )
+            }
+
+            currentType.isSubtypeOf<scala.collection.Set<*>?>() -> {
+                val subType = tArguments.first().type!!
+                val elementEncoder = encoderFor(
+                    currentType = subType,
+                    seenTypeSet = seenTypeSet,
+                    typeVariables = typeVariables,
+                )
+                AgnosticEncoders.IterableEncoder<scala.collection.Set<*>, _>(
+                    /* clsTag = */ ClassTag.apply(jClass),
+                    /* element = */ elementEncoder,
+                    /* containsNull = */ subType.isMarkedNullable,
+                    /* lenientSerialization = */ false,
+                )
+            }
+
+            currentType.isSubtypeOf<Map<*, *>?>() || currentType.isSubtypeOf<scala.collection.Map<*, *>?>() -> {
+                val keyEncoder = encoderFor(
+                    currentType = tArguments[0].type!!,
+                    seenTypeSet = seenTypeSet,
+                    typeVariables = typeVariables,
+                )
+                val valueEncoder = encoderFor(
+                    currentType = tArguments[1].type!!,
+                    seenTypeSet = seenTypeSet,
+                    typeVariables = typeVariables,
+                )
+                AgnosticEncoders.MapEncoder(
+                    /* clsTag = */ ClassTag.apply<Map<*, *>>(jClass),
+                    /* keyEncoder = */ keyEncoder,
+                    /* valueEncoder = */ valueEncoder,
+                    /* valueContainsNull = */ tArguments[1].type!!.isMarkedNullable,
+                )
+            }
+
+            kClass.isData -> {
+                val constructor = kClass.primaryConstructor!!
+                val kParameters = constructor.parameters
+                // TODO: consider filtering out @Transient-annotated properties here
+
+                val props = kParameters.map {
+                    kClass.declaredMemberProperties.find { prop -> prop.name == it.name }!!
+                }
+
+                kClass.checkIsSparkified(props, kParameters.map { it.hasAnnotation<ColumnName>() })
+
+                val params = (kParameters zip props).map { (param, prop) ->
+                    // check if the type was a filled-in generic type, otherwise just use the given type
+                    val paramType = typeVariables[param.type.simpleName] ?: param.type
+                    val encoder = encoderFor(
+                        currentType = paramType,
+                        seenTypeSet = seenTypeSet + currentType,
+                        typeVariables = typeVariables,
                     )
-                }.toTypedArray()
-            )
 
-            KComplexTypeWrapper(
-                /* dt = */ structType,
-                /* cls = */ klass.java,
-                /* nullable = */ true
-            )
-        }
+                    val paramName = param.name
+                    val readMethodName = prop.javaGetter!!.name
+                    val writeMethodName = (prop as? KMutableProperty<*>)?.setter?.javaMethod?.name
+
+                    EncoderField(
+                        /* name = */ readMethodName,
+                        /* enc = */ encoder,
+                        /* nullable = */ paramType.isMarkedNullable,
+                        /* metadata = */ Metadata.empty(),
+                        /* readMethod = */ readMethodName.toOption(),
+                        /* writeMethod = */ writeMethodName.toOption(),
+                    )
+                }
+                ProductEncoder<Any>(
+                    /* clsTag = */ ClassTag.apply(jClass),
+                    /* fields = */ params.asScalaSeq(),
+                    //#if sparkMinor >= 3.5
+                    /* outerPointerGetter = */ OuterScopes.getOuterScope(jClass).toOption(),
+                    //#endif
+                )
+            }
 
-        UDTRegistration.exists(klass.jvmName) -> {
-            @Suppress("UNCHECKED_CAST")
-            val dataType = UDTRegistration.getUDTFor(klass.jvmName)
-                .getOrNull()!!
-                .let { it as Class<UserDefinedType<*>> }
-                .getConstructor()
-                .newInstance()
-
-            KSimpleTypeWrapper(
-                /* dt = */ dataType,
-                /* cls = */ klass.java,
-                /* nullable = */ type.isMarkedNullable
-            )
-        }
+            currentType.isDefinedByScalaConstructorParams() -> {
+                if (currentType in seenTypeSet) throw IllegalStateException("Circular reference detected for type $currentType")
+                val constructorParams = currentType.getScalaConstructorParameters(typeVariables, kClass)
 
-        klass.hasAnnotation<SQLUserDefinedType>() -> {
-            val dataType = klass.findAnnotation<SQLUserDefinedType>()!!
-                .udt
-                .java
-                .getConstructor()
-                .newInstance()
-
-            KSimpleTypeWrapper(
-                /* dt = */ dataType,
-                /* cls = */ klass.java,
-                /* nullable = */ type.isMarkedNullable
-            )
-        }
+                val params = constructorParams.map { (paramName, paramType) ->
+                    val encoder = encoderFor(
+                        currentType = paramType,
+                        seenTypeSet = seenTypeSet + currentType,
+                        typeVariables = typeVariables,
+                    )
+                    AgnosticEncoders.EncoderField(
+                        /* name = */ paramName,
+                        /* enc = */ encoder,
+                        /* nullable = */ paramType.isMarkedNullable,
+                        /* metadata = */ Metadata.empty(),
+                        /* readMethod = */ paramName.toOption(),
+                        /* writeMethod = */ null.toOption(),
+                    )
+                }
+                ProductEncoder<Any>(
+                    /* clsTag = */ ClassTag.apply(jClass),
+                    /* fields = */ params.asScalaSeq(),
+                    //#if sparkMinor >= 3.5
+                    /* outerPointerGetter = */ OuterScopes.getOuterScope(jClass).toOption(),
+                    //#endif
+                )
+            }
+
+            // java bean class
+            else -> {
+                if (currentType in seenTypeSet)
+                    throw IllegalStateException("Circular reference detected for type $currentType")
+
+                val properties = getJavaBeanReadableProperties(kClass)
+                val fields = properties.map {
+                    val encoder = encoderFor(
+                        currentType = it.type,
+                        seenTypeSet = seenTypeSet + currentType,
+                        typeVariables = typeVariables,
+                    )
+
+                    EncoderField(
+                        /* name = */ it.propName,
+                        /* enc = */ encoder,
+                        /* nullable = */ encoder.nullable() && !it.hasNonnull,
+                        /* metadata = */ Metadata.empty(),
+                        /* readMethod = */ it.getterName.toOption(),
+                        /* writeMethod = */ it.setterName.toOption(),
+                    )
+                }
 
-        else -> throw IllegalArgumentException("$type is unsupported")
+                JavaBeanEncoder<Any>(
+                    ClassTag.apply(jClass),
+                    fields.asScalaSeq(),
+                )
+            }
+        }
     }
-}
 
-/**
- * Memoized version of [schema]. This ensures the [DataType] of given `type` only
- * has to be calculated once.
- */
-private val memoizedSchema: (type: KType) -> DataType = memoize {
-    schema(it)
-}
+    private data class JavaReadableProperty(
+        val propName: String,
+        val getterName: String,
+        val setterName: String?,
+        val type: KType,
+        val hasNonnull: Boolean,
+    )
 
-private fun transitiveMerge(a: Map<String, KType>, b: Map<String, KType>): Map<String, KType> =
-    a + b.mapValues { a.getOrDefault(it.value.toString(), it.value) }
+    private fun getJavaBeanReadableProperties(klass: KClass<*>): List<JavaReadableProperty> {
+        val functions = klass.declaredMemberFunctions.filter {
+            it.name.startsWith("get") || it.name.startsWith("is") || it.name.startsWith("set")
+        }
 
-/** Wrapper around function with 1 argument to avoid recalculation when a certain argument is queried again. */
-private class Memoize1<in T, out R>(private val function: (T) -> R) : (T) -> R {
-    private val values = ConcurrentHashMap<T, R>()
-    override fun invoke(x: T): R = values.getOrPut(x) { function(x) }
-}
+        val properties = functions.mapNotNull { getter ->
+            if (getter.name.startsWith("set")) return@mapNotNull null
+
+            val propName = getter.name
+                .removePrefix("get")
+                .removePrefix("is")
+                .replaceFirstChar { it.lowercase() }
+            val setter = functions.find {
+                it.name == "set${propName.replaceFirstChar { it.uppercase() }}"
+            }
 
-/** Wrapper around function to avoid recalculation when a certain argument is queried again. */
-private fun <T, R> ((T) -> R).memoized(): (T) -> R = Memoize1(this)
+            JavaReadableProperty(
+                propName = propName,
+                getterName = getter.name,
+                setterName = setter?.name,
+                type = getter.returnType,
+                hasNonnull = getter.hasAnnotation<Nonnull>(),
+            )
+        }
 
-/** Wrapper around function to avoid recalculation when a certain argument is queried again. */
-private fun <T, R> memoize(function: (T) -> R): (T) -> R = Memoize1(function)
+        // Aside from java get/set functions, attempt to get kotlin properties as well, for non data classes
+        val kotlinProps = klass.declaredMemberProperties
+            .filter { it.getter.javaMethod != null } // filter kotlin-facing props
+            .map {
+                val hasSetter = (it as? KMutableProperty<*>)?.setter != null
+                val nameSuffix = it.name.removePrefix("is").replaceFirstChar { it.uppercase() }
+
+                JavaReadableProperty(
+                    propName = it.name,
+                    getterName = if (it.name.startsWith("is")) it.name else "get$nameSuffix",
+                    setterName = if (hasSetter) "set$nameSuffix" else null,
+                    type = it.returnType,
+                    hasNonnull = it.hasAnnotation<Nonnull>(),
+                )
+            }
 
+        return properties + kotlinProps
+    }
+}
\ No newline at end of file
diff --git a/kotlin-spark-api/src/main/kotlin/org/jetbrains/kotlinx/spark/api/KeyValueGroupedDataset.kt b/kotlin-spark-api/src/main/kotlin/org/jetbrains/kotlinx/spark/api/KeyValueGroupedDataset.kt
index ec840dbe..b051d76b 100644
--- a/kotlin-spark-api/src/main/kotlin/org/jetbrains/kotlinx/spark/api/KeyValueGroupedDataset.kt
+++ b/kotlin-spark-api/src/main/kotlin/org/jetbrains/kotlinx/spark/api/KeyValueGroupedDataset.kt
@@ -51,7 +51,7 @@ import scala.Tuple2
  * ```
  */
 inline fun <KEY, VALUE, reified R> KeyValueGroupedDataset<KEY, VALUE>.mapValues(noinline func: (VALUE) -> R): KeyValueGroupedDataset<KEY, R> =
-    mapValues(MapFunction(func), encoder<R>())
+    mapValues(MapFunction(func), kotlinEncoderFor<R>())
 
 /**
  * (Kotlin-specific)
@@ -70,7 +70,7 @@ inline fun <KEY, VALUE, reified R> KeyValueGroupedDataset<KEY, VALUE>.mapValues(
  * constraints of their cluster.
  */
 inline fun <KEY, VALUE, reified R> KeyValueGroupedDataset<KEY, VALUE>.mapGroups(noinline func: (KEY, Iterator<VALUE>) -> R): Dataset<R> =
-    mapGroups(MapGroupsFunction(func), encoder<R>())
+    mapGroups(MapGroupsFunction(func), kotlinEncoderFor<R>())
 
 /**
  * (Kotlin-specific)
@@ -104,7 +104,7 @@ inline fun <K, V, reified U> KeyValueGroupedDataset<K, V>.flatMapGroups(
     noinline func: (key: K, values: Iterator<V>) -> Iterator<U>,
 ): Dataset<U> = flatMapGroups(
     FlatMapGroupsFunction(func),
-    encoder<U>(),
+    kotlinEncoderFor<U>(),
 )
 
 
@@ -127,8 +127,8 @@ inline fun <K, V, reified S, reified U> KeyValueGroupedDataset<K, V>.mapGroupsWi
     noinline func: (key: K, values: Iterator<V>, state: GroupState<S>) -> U,
 ): Dataset<U> = mapGroupsWithState(
     MapGroupsWithStateFunction(func),
-    encoder<S>(),
-    encoder<U>(),
+    kotlinEncoderFor<S>(),
+    kotlinEncoderFor<U>(),
 )
 
 /**
@@ -152,8 +152,8 @@ inline fun <K, V, reified S, reified U> KeyValueGroupedDataset<K, V>.mapGroupsWi
     noinline func: (key: K, values: Iterator<V>, state: GroupState<S>) -> U,
 ): Dataset<U> = mapGroupsWithState(
     MapGroupsWithStateFunction(func),
-    encoder<S>(),
-    encoder<U>(),
+    kotlinEncoderFor<S>(),
+    kotlinEncoderFor<U>(),
     timeoutConf,
 )
 
@@ -181,8 +181,8 @@ inline fun <K, V, reified S, reified U> KeyValueGroupedDataset<K, V>.flatMapGrou
 ): Dataset<U> = flatMapGroupsWithState(
     FlatMapGroupsWithStateFunction(func),
     outputMode,
-    encoder<S>(),
-    encoder<U>(),
+    kotlinEncoderFor<S>(),
+    kotlinEncoderFor<U>(),
     timeoutConf,
 )
 
@@ -199,5 +199,5 @@ inline fun <K, V, U, reified R> KeyValueGroupedDataset<K, V>.cogroup(
 ): Dataset<R> = cogroup(
     other,
     CoGroupFunction(func),
-    encoder<R>(),
+    kotlinEncoderFor<R>(),
 )
\ No newline at end of file
diff --git a/kotlin-spark-api/src/main/kotlin/org/jetbrains/kotlinx/spark/api/RddDouble.kt b/kotlin-spark-api/src/main/kotlin/org/jetbrains/kotlinx/spark/api/RddDouble.kt
index 3ba3ab72..6bc28203 100644
--- a/kotlin-spark-api/src/main/kotlin/org/jetbrains/kotlinx/spark/api/RddDouble.kt
+++ b/kotlin-spark-api/src/main/kotlin/org/jetbrains/kotlinx/spark/api/RddDouble.kt
@@ -20,7 +20,7 @@ inline fun <reified T : Number> JavaRDD<T>.toJavaDoubleRDD(): JavaDoubleRDD =
 
 /** Utility method to convert [JavaDoubleRDD] to [JavaRDD]<[Double]>. */
 @Suppress("UNCHECKED_CAST")
-fun JavaDoubleRDD.toDoubleRDD(): JavaRDD<Double> =
+inline fun JavaDoubleRDD.toDoubleRDD(): JavaRDD<Double> =
     JavaDoubleRDD.toRDD(this).toJavaRDD() as JavaRDD<Double>
 
 /** Add up the elements in this RDD. */
diff --git a/kotlin-spark-api/src/main/kotlin/org/jetbrains/kotlinx/spark/api/SparkSession.kt b/kotlin-spark-api/src/main/kotlin/org/jetbrains/kotlinx/spark/api/SparkSession.kt
index d2f79aca..00655de0 100644
--- a/kotlin-spark-api/src/main/kotlin/org/jetbrains/kotlinx/spark/api/SparkSession.kt
+++ b/kotlin-spark-api/src/main/kotlin/org/jetbrains/kotlinx/spark/api/SparkSession.kt
@@ -34,7 +34,6 @@ import org.apache.spark.api.java.JavaRDD
 import org.apache.spark.api.java.JavaRDDLike
 import org.apache.spark.api.java.JavaSparkContext
 import org.apache.spark.broadcast.Broadcast
-import org.apache.spark.deploy.SparkHadoopUtil
 import org.apache.spark.rdd.RDD
 import org.apache.spark.sql.Dataset
 import org.apache.spark.sql.Row
@@ -45,7 +44,7 @@ import org.apache.spark.streaming.Durations
 import org.apache.spark.streaming.api.java.JavaStreamingContext
 import org.jetbrains.kotlinx.spark.api.SparkLogLevel.ERROR
 import org.jetbrains.kotlinx.spark.api.tuples.*
-import org.jetbrains.kotlinx.spark.extensions.KSparkExtensions
+import scala.reflect.ClassTag
 import java.io.Serializable
 
 /**
@@ -76,7 +75,7 @@ class KSparkSession(val spark: SparkSession) {
     inline fun <reified T> dsOf(vararg arg: T): Dataset<T> = spark.dsOf(*arg)
 
     /** Creates new empty dataset of type [T]. */
-    inline fun <reified T> emptyDataset(): Dataset<T> = spark.emptyDataset(encoder<T>())
+    inline fun <reified T> emptyDataset(): Dataset<T> = spark.emptyDataset(kotlinEncoderFor<T>())
 
     /** Utility method to create dataframe from *array or vararg arguments */
     inline fun <reified T> dfOf(vararg arg: T): Dataset<Row> = spark.dfOf(*arg)
@@ -227,7 +226,7 @@ enum class SparkLogLevel {
  * Returns the Spark context associated with this Spark session.
  */
 val SparkSession.sparkContext: SparkContext
-    get() = KSparkExtensions.sparkContext(this)
+    get() = sparkContext()
 
 /**
  * Wrapper for spark creation which allows setting different spark params.
@@ -339,7 +338,7 @@ inline fun withSpark(sparkConf: SparkConf, logLevel: SparkLogLevel = ERROR, func
 fun withSparkStreaming(
     batchDuration: Duration = Durations.seconds(1L),
     checkpointPath: String? = null,
-    hadoopConf: Configuration = SparkHadoopUtil.get().conf(),
+    hadoopConf: Configuration = getDefaultHadoopConf(),
     createOnError: Boolean = false,
     props: Map<String, Any> = emptyMap(),
     master: String = SparkConf().get("spark.master", "local[*]"),
@@ -386,6 +385,18 @@ fun withSparkStreaming(
     ssc.stop()
 }
 
+// calling org.apache.spark.deploy.`SparkHadoopUtil$`.`MODULE$`.get().conf()
+private fun getDefaultHadoopConf(): Configuration {
+    val klass = Class.forName("org.apache.spark.deploy.SparkHadoopUtil$")
+    val moduleField = klass.getField("MODULE$").also { it.isAccessible = true }
+    val module = moduleField.get(null)
+    val getMethod = klass.getMethod("get").also { it.isAccessible = true }
+    val sparkHadoopUtil = getMethod.invoke(module)
+    val confMethod = sparkHadoopUtil.javaClass.getMethod("conf").also { it.isAccessible = true }
+    val conf = confMethod.invoke(sparkHadoopUtil) as Configuration
+
+    return conf
+}
 
 /**
  * Broadcast a read-only variable to the cluster, returning a
@@ -396,7 +407,7 @@ fun withSparkStreaming(
  * @return `Broadcast` object, a read-only variable cached on each machine
  */
 inline fun <reified T> SparkSession.broadcast(value: T): Broadcast<T> = try {
-    sparkContext.broadcast(value, encoder<T>().clsTag())
+    sparkContext.broadcast(value, ClassTag.apply(T::class.java))
 } catch (e: ClassNotFoundException) {
     JavaSparkContext(sparkContext).broadcast(value)
 }
@@ -416,7 +427,7 @@ inline fun <reified T> SparkSession.broadcast(value: T): Broadcast<T> = try {
     DeprecationLevel.WARNING
 )
 inline fun <reified T> SparkContext.broadcast(value: T): Broadcast<T> = try {
-    broadcast(value, encoder<T>().clsTag())
+    broadcast(value, ClassTag.apply(T::class.java))
 } catch (e: ClassNotFoundException) {
     JavaSparkContext(this).broadcast(value)
 }
\ No newline at end of file
diff --git a/kotlin-spark-api/src/main/kotlin/org/jetbrains/kotlinx/spark/api/UDFRegister.kt b/kotlin-spark-api/src/main/kotlin/org/jetbrains/kotlinx/spark/api/UDFRegister.kt
index bd08d92c..18b92cec 100644
--- a/kotlin-spark-api/src/main/kotlin/org/jetbrains/kotlinx/spark/api/UDFRegister.kt
+++ b/kotlin-spark-api/src/main/kotlin/org/jetbrains/kotlinx/spark/api/UDFRegister.kt
@@ -53,7 +53,7 @@ class UDFWrapper0(private val udfName: String) {
 @OptIn(ExperimentalStdlibApi::class)
 @Deprecated("Use new UDF notation", ReplaceWith("this.register(name, func)"), DeprecationLevel.HIDDEN)
 inline fun <reified R> UDFRegistration.register(name: String, noinline func: () -> R): UDFWrapper0 {
-    register(name, UDF0(func), schema(typeOf<R>()).unWrap())
+    register(name, UDF0(func), schemaFor<R>())
     return UDFWrapper0(name)
 }
 
@@ -78,7 +78,7 @@ class UDFWrapper1(private val udfName: String) {
 @Deprecated("Use new UDF notation", ReplaceWith("this.register(name, func)"), DeprecationLevel.HIDDEN)
 inline fun <reified T0, reified R> UDFRegistration.register(name: String, noinline func: (T0) -> R): UDFWrapper1 {
     T0::class.checkForValidType("T0")
-    register(name, UDF1(func), schema(typeOf<R>()).unWrap())
+    register(name, UDF1(func), schemaFor<R>())
     return UDFWrapper1(name)
 }
 
@@ -107,7 +107,7 @@ inline fun <reified T0, reified T1, reified R> UDFRegistration.register(
 ): UDFWrapper2 {
     T0::class.checkForValidType("T0")
     T1::class.checkForValidType("T1")
-    register(name, UDF2(func), schema(typeOf<R>()).unWrap())
+    register(name, UDF2(func), schemaFor<R>())
     return UDFWrapper2(name)
 }
 
@@ -137,7 +137,7 @@ inline fun <reified T0, reified T1, reified T2, reified R> UDFRegistration.regis
     T0::class.checkForValidType("T0")
     T1::class.checkForValidType("T1")
     T2::class.checkForValidType("T2")
-    register(name, UDF3(func), schema(typeOf<R>()).unWrap())
+    register(name, UDF3(func), schemaFor<R>())
     return UDFWrapper3(name)
 }
 
@@ -168,7 +168,7 @@ inline fun <reified T0, reified T1, reified T2, reified T3, reified R> UDFRegist
     T1::class.checkForValidType("T1")
     T2::class.checkForValidType("T2")
     T3::class.checkForValidType("T3")
-    register(name, UDF4(func), schema(typeOf<R>()).unWrap())
+    register(name, UDF4(func), schemaFor<R>())
     return UDFWrapper4(name)
 }
 
@@ -200,7 +200,7 @@ inline fun <reified T0, reified T1, reified T2, reified T3, reified T4, reified
     T2::class.checkForValidType("T2")
     T3::class.checkForValidType("T3")
     T4::class.checkForValidType("T4")
-    register(name, UDF5(func), schema(typeOf<R>()).unWrap())
+    register(name, UDF5(func), schemaFor<R>())
     return UDFWrapper5(name)
 }
 
@@ -240,7 +240,7 @@ inline fun <reified T0, reified T1, reified T2, reified T3, reified T4, reified
     T3::class.checkForValidType("T3")
     T4::class.checkForValidType("T4")
     T5::class.checkForValidType("T5")
-    register(name, UDF6(func), schema(typeOf<R>()).unWrap())
+    register(name, UDF6(func), schemaFor<R>())
     return UDFWrapper6(name)
 }
 
@@ -282,7 +282,7 @@ inline fun <reified T0, reified T1, reified T2, reified T3, reified T4, reified
     T4::class.checkForValidType("T4")
     T5::class.checkForValidType("T5")
     T6::class.checkForValidType("T6")
-    register(name, UDF7(func), schema(typeOf<R>()).unWrap())
+    register(name, UDF7(func), schemaFor<R>())
     return UDFWrapper7(name)
 }
 
@@ -326,7 +326,7 @@ inline fun <reified T0, reified T1, reified T2, reified T3, reified T4, reified
     T5::class.checkForValidType("T5")
     T6::class.checkForValidType("T6")
     T7::class.checkForValidType("T7")
-    register(name, UDF8(func), schema(typeOf<R>()).unWrap())
+    register(name, UDF8(func), schemaFor<R>())
     return UDFWrapper8(name)
 }
 
@@ -372,7 +372,7 @@ inline fun <reified T0, reified T1, reified T2, reified T3, reified T4, reified
     T6::class.checkForValidType("T6")
     T7::class.checkForValidType("T7")
     T8::class.checkForValidType("T8")
-    register(name, UDF9(func), schema(typeOf<R>()).unWrap())
+    register(name, UDF9(func), schemaFor<R>())
     return UDFWrapper9(name)
 }
 
@@ -432,7 +432,7 @@ inline fun <reified T0, reified T1, reified T2, reified T3, reified T4, reified
     T7::class.checkForValidType("T7")
     T8::class.checkForValidType("T8")
     T9::class.checkForValidType("T9")
-    register(name, UDF10(func), schema(typeOf<R>()).unWrap())
+    register(name, UDF10(func), schemaFor<R>())
     return UDFWrapper10(name)
 }
 
@@ -495,7 +495,7 @@ inline fun <reified T0, reified T1, reified T2, reified T3, reified T4, reified
     T8::class.checkForValidType("T8")
     T9::class.checkForValidType("T9")
     T10::class.checkForValidType("T10")
-    register(name, UDF11(func), schema(typeOf<R>()).unWrap())
+    register(name, UDF11(func), schemaFor<R>())
     return UDFWrapper11(name)
 }
 
@@ -561,7 +561,7 @@ inline fun <reified T0, reified T1, reified T2, reified T3, reified T4, reified
     T9::class.checkForValidType("T9")
     T10::class.checkForValidType("T10")
     T11::class.checkForValidType("T11")
-    register(name, UDF12(func), schema(typeOf<R>()).unWrap())
+    register(name, UDF12(func), schemaFor<R>())
     return UDFWrapper12(name)
 }
 
@@ -630,7 +630,7 @@ inline fun <reified T0, reified T1, reified T2, reified T3, reified T4, reified
     T10::class.checkForValidType("T10")
     T11::class.checkForValidType("T11")
     T12::class.checkForValidType("T12")
-    register(name, UDF13(func), schema(typeOf<R>()).unWrap())
+    register(name, UDF13(func), schemaFor<R>())
     return UDFWrapper13(name)
 }
 
@@ -702,7 +702,7 @@ inline fun <reified T0, reified T1, reified T2, reified T3, reified T4, reified
     T11::class.checkForValidType("T11")
     T12::class.checkForValidType("T12")
     T13::class.checkForValidType("T13")
-    register(name, UDF14(func), schema(typeOf<R>()).unWrap())
+    register(name, UDF14(func), schemaFor<R>())
     return UDFWrapper14(name)
 }
 
@@ -777,7 +777,7 @@ inline fun <reified T0, reified T1, reified T2, reified T3, reified T4, reified
     T12::class.checkForValidType("T12")
     T13::class.checkForValidType("T13")
     T14::class.checkForValidType("T14")
-    register(name, UDF15(func), schema(typeOf<R>()).unWrap())
+    register(name, UDF15(func), schemaFor<R>())
     return UDFWrapper15(name)
 }
 
@@ -855,7 +855,7 @@ inline fun <reified T0, reified T1, reified T2, reified T3, reified T4, reified
     T13::class.checkForValidType("T13")
     T14::class.checkForValidType("T14")
     T15::class.checkForValidType("T15")
-    register(name, UDF16(func), schema(typeOf<R>()).unWrap())
+    register(name, UDF16(func), schemaFor<R>())
     return UDFWrapper16(name)
 }
 
@@ -936,7 +936,7 @@ inline fun <reified T0, reified T1, reified T2, reified T3, reified T4, reified
     T14::class.checkForValidType("T14")
     T15::class.checkForValidType("T15")
     T16::class.checkForValidType("T16")
-    register(name, UDF17(func), schema(typeOf<R>()).unWrap())
+    register(name, UDF17(func), schemaFor<R>())
     return UDFWrapper17(name)
 }
 
@@ -1020,7 +1020,7 @@ inline fun <reified T0, reified T1, reified T2, reified T3, reified T4, reified
     T15::class.checkForValidType("T15")
     T16::class.checkForValidType("T16")
     T17::class.checkForValidType("T17")
-    register(name, UDF18(func), schema(typeOf<R>()).unWrap())
+    register(name, UDF18(func), schemaFor<R>())
     return UDFWrapper18(name)
 }
 
@@ -1107,7 +1107,7 @@ inline fun <reified T0, reified T1, reified T2, reified T3, reified T4, reified
     T16::class.checkForValidType("T16")
     T17::class.checkForValidType("T17")
     T18::class.checkForValidType("T18")
-    register(name, UDF19(func), schema(typeOf<R>()).unWrap())
+    register(name, UDF19(func), schemaFor<R>())
     return UDFWrapper19(name)
 }
 
@@ -1197,7 +1197,7 @@ inline fun <reified T0, reified T1, reified T2, reified T3, reified T4, reified
     T17::class.checkForValidType("T17")
     T18::class.checkForValidType("T18")
     T19::class.checkForValidType("T19")
-    register(name, UDF20(func), schema(typeOf<R>()).unWrap())
+    register(name, UDF20(func), schemaFor<R>())
     return UDFWrapper20(name)
 }
 
@@ -1290,7 +1290,7 @@ inline fun <reified T0, reified T1, reified T2, reified T3, reified T4, reified
     T18::class.checkForValidType("T18")
     T19::class.checkForValidType("T19")
     T20::class.checkForValidType("T20")
-    register(name, UDF21(func), schema(typeOf<R>()).unWrap())
+    register(name, UDF21(func), schemaFor<R>())
     return UDFWrapper21(name)
 }
 
@@ -1386,6 +1386,6 @@ inline fun <reified T0, reified T1, reified T2, reified T3, reified T4, reified
     T19::class.checkForValidType("T19")
     T20::class.checkForValidType("T20")
     T21::class.checkForValidType("T21")
-    register(name, UDF22(func), schema(typeOf<R>()).unWrap())
+    register(name, UDF22(func), schemaFor<R>())
     return UDFWrapper22(name)
 }
diff --git a/kotlin-spark-api/src/main/kotlin/org/jetbrains/kotlinx/spark/api/UserDefinedAggregateFunction.kt b/kotlin-spark-api/src/main/kotlin/org/jetbrains/kotlinx/spark/api/UserDefinedAggregateFunction.kt
index 595fe0fa..11e14c5f 100644
--- a/kotlin-spark-api/src/main/kotlin/org/jetbrains/kotlinx/spark/api/UserDefinedAggregateFunction.kt
+++ b/kotlin-spark-api/src/main/kotlin/org/jetbrains/kotlinx/spark/api/UserDefinedAggregateFunction.kt
@@ -41,8 +41,8 @@ inline fun <reified IN, reified BUF, reified OUT> aggregatorOf(
     noinline reduce: (b: BUF, a: IN) -> BUF,
     noinline merge: (b1: BUF, b2: BUF) -> BUF,
     noinline finish: (reduction: BUF) -> OUT,
-    bufferEncoder: Encoder<BUF> = encoder(),
-    outputEncoder: Encoder<OUT> = encoder(),
+    bufferEncoder: Encoder<BUF> = kotlinEncoderFor(),
+    outputEncoder: Encoder<OUT> = kotlinEncoderFor(),
 ): Aggregator<IN, BUF, OUT> = Aggregator(zero, reduce, merge, finish, bufferEncoder, outputEncoder)
 
 class Aggregator<IN, BUF, OUT>(
@@ -129,10 +129,10 @@ inline fun <reified IN, reified OUT, reified AGG : Aggregator<IN, *, OUT>> udafU
     IN::class.checkForValidType("IN")
 
     return UserDefinedFunction1(
-        udf = functions.udaf(agg, encoder<IN>())
+        udf = functions.udaf(agg, kotlinEncoderFor<IN>())
             .let { if (nondeterministic) it.asNondeterministic() else it }
             .let { if (typeOf<OUT>().isMarkedNullable) it else it.asNonNullable() },
-        encoder = encoder<OUT>(),
+        encoder = kotlinEncoderFor<OUT>(),
     )
 }
 
@@ -160,8 +160,8 @@ inline fun <reified IN, reified BUF, reified OUT> udaf(
     noinline reduce: (b: BUF, a: IN) -> BUF,
     noinline merge: (b1: BUF, b2: BUF) -> BUF,
     noinline finish: (reduction: BUF) -> OUT,
-    bufferEncoder: Encoder<BUF> = encoder(),
-    outputEncoder: Encoder<OUT> = encoder(),
+    bufferEncoder: Encoder<BUF> = kotlinEncoderFor(),
+    outputEncoder: Encoder<OUT> = kotlinEncoderFor(),
     nondeterministic: Boolean = false,
 ): UserDefinedFunction1<IN, OUT> = udafUnnamed(
     aggregatorOf(
@@ -202,8 +202,8 @@ inline fun <reified IN, reified BUF, reified OUT> udaf(
     noinline reduce: (b: BUF, a: IN) -> BUF,
     noinline merge: (b1: BUF, b2: BUF) -> BUF,
     noinline finish: (reduction: BUF) -> OUT,
-    bufferEncoder: Encoder<BUF> = encoder(),
-    outputEncoder: Encoder<OUT> = encoder(),
+    bufferEncoder: Encoder<BUF> = kotlinEncoderFor(),
+    outputEncoder: Encoder<OUT> = kotlinEncoderFor(),
     nondeterministic: Boolean = false,
 ): NamedUserDefinedFunction1<IN, OUT> = udaf(
     name = name,
@@ -279,8 +279,8 @@ inline fun <reified IN, reified BUF, reified OUT> UDFRegistration.register(
     noinline reduce: (b: BUF, a: IN) -> BUF,
     noinline merge: (b1: BUF, b2: BUF) -> BUF,
     noinline finish: (reduction: BUF) -> OUT,
-    bufferEncoder: Encoder<BUF> = encoder(),
-    outputEncoder: Encoder<OUT> = encoder(),
+    bufferEncoder: Encoder<BUF> = kotlinEncoderFor(),
+    outputEncoder: Encoder<OUT> = kotlinEncoderFor(),
     nondeterministic: Boolean = false,
 ): NamedUserDefinedFunction1<IN, OUT> = register(
     udaf(name, zero, reduce, merge, finish, bufferEncoder, outputEncoder, nondeterministic)
diff --git a/kotlin-spark-api/src/main/kotlin/org/jetbrains/kotlinx/spark/api/UserDefinedFunction.kt b/kotlin-spark-api/src/main/kotlin/org/jetbrains/kotlinx/spark/api/UserDefinedFunction.kt
index 3fabf6d2..60e8f7c8 100644
--- a/kotlin-spark-api/src/main/kotlin/org/jetbrains/kotlinx/spark/api/UserDefinedFunction.kt
+++ b/kotlin-spark-api/src/main/kotlin/org/jetbrains/kotlinx/spark/api/UserDefinedFunction.kt
@@ -23,6 +23,7 @@ package org.jetbrains.kotlinx.spark.api
 
 import org.apache.spark.sql.*
 import org.apache.spark.sql.types.DataType
+import org.apache.spark.sql.types.StructType
 import scala.collection.Seq
 import java.io.Serializable
 import kotlin.reflect.KClass
@@ -31,12 +32,6 @@ import kotlin.reflect.full.isSubclassOf
 import kotlin.reflect.full.primaryConstructor
 import org.apache.spark.sql.expressions.UserDefinedFunction as SparkUserDefinedFunction
 
-/** Unwraps [DataTypeWithClass]. */
-fun DataType.unWrap(): DataType =
-    when (this) {
-        is DataTypeWithClass -> DataType.fromJson(dt().json())
-        else -> this
-    }
 
 /**
  * Checks if [this] is of a valid type for a UDF, otherwise it throws a [TypeOfUDFParameterNotSupportedException]
@@ -74,9 +69,9 @@ class TypeOfUDFParameterNotSupportedException(kClass: KClass<*>, parameterName:
 )
 
 @JvmName("arrayColumnAsSeq")
-fun <DsType, T> TypedColumn<DsType, Array<T>>.asSeq(): TypedColumn<DsType, Seq<T>> = typed()
+inline fun <DsType, reified T> TypedColumn<DsType, Array<T>>.asSeq(): TypedColumn<DsType, Seq<T>> = typed()
 @JvmName("iterableColumnAsSeq")
-fun <DsType, T, I : Iterable<T>> TypedColumn<DsType, I>.asSeq(): TypedColumn<DsType, Seq<T>> = typed()
+inline fun <DsType, reified T, I : Iterable<T>> TypedColumn<DsType, I>.asSeq(): TypedColumn<DsType, Seq<T>> = typed()
 @JvmName("byteArrayColumnAsSeq")
 fun <DsType> TypedColumn<DsType, ByteArray>.asSeq(): TypedColumn<DsType, Seq<Byte>> = typed()
 @JvmName("charArrayColumnAsSeq")
diff --git a/kotlin-spark-api/src/main/kotlin/org/jetbrains/kotlinx/spark/api/UserDefinedFunctionVararg.kt b/kotlin-spark-api/src/main/kotlin/org/jetbrains/kotlinx/spark/api/UserDefinedFunctionVararg.kt
index e23aa160..6ffd9ff6 100644
--- a/kotlin-spark-api/src/main/kotlin/org/jetbrains/kotlinx/spark/api/UserDefinedFunctionVararg.kt
+++ b/kotlin-spark-api/src/main/kotlin/org/jetbrains/kotlinx/spark/api/UserDefinedFunctionVararg.kt
@@ -7,9 +7,9 @@
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at
- * 
+ *
  *      http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -135,10 +135,10 @@ inline fun <reified R> udf(
 
     return withAllowUntypedScalaUDF {
         UserDefinedFunctionVararg(
-            udf = functions.udf(VarargUnwrapper(varargFunc) { i, init -> ByteArray(i, init::call) }, schema(typeOf<R>()).unWrap())
+            udf = functions.udf(VarargUnwrapper(varargFunc) { i, init -> ByteArray(i, init::apply) }, schemaFor<R>())
                 .let { if (nondeterministic) it.asNondeterministic() else it }
                 .let { if (typeOf<R>().isMarkedNullable) it else it.asNonNullable() },
-            encoder = encoder<R>(),
+            encoder = kotlinEncoderFor<R>(),
         )
     }
 }
@@ -334,10 +334,10 @@ inline fun <reified R> udf(
 
     return withAllowUntypedScalaUDF {
         UserDefinedFunctionVararg(
-            udf = functions.udf(VarargUnwrapper(varargFunc) { i, init -> ShortArray(i, init::call) }, schema(typeOf<R>()).unWrap())
+            udf = functions.udf(VarargUnwrapper(varargFunc) { i, init -> ShortArray(i, init::apply) }, schemaFor<R>())
                 .let { if (nondeterministic) it.asNondeterministic() else it }
                 .let { if (typeOf<R>().isMarkedNullable) it else it.asNonNullable() },
-            encoder = encoder<R>(),
+            encoder = kotlinEncoderFor<R>(),
         )
     }
 }
@@ -533,10 +533,10 @@ inline fun <reified R> udf(
 
     return withAllowUntypedScalaUDF {
         UserDefinedFunctionVararg(
-            udf = functions.udf(VarargUnwrapper(varargFunc) { i, init -> IntArray(i, init::call) }, schema(typeOf<R>()).unWrap())
+            udf = functions.udf(VarargUnwrapper(varargFunc) { i, init -> IntArray(i, init::apply) }, schemaFor<R>())
                 .let { if (nondeterministic) it.asNondeterministic() else it }
                 .let { if (typeOf<R>().isMarkedNullable) it else it.asNonNullable() },
-            encoder = encoder<R>(),
+            encoder = kotlinEncoderFor<R>(),
         )
     }
 }
@@ -732,10 +732,10 @@ inline fun <reified R> udf(
 
     return withAllowUntypedScalaUDF {
         UserDefinedFunctionVararg(
-            udf = functions.udf(VarargUnwrapper(varargFunc) { i, init -> LongArray(i, init::call) }, schema(typeOf<R>()).unWrap())
+            udf = functions.udf(VarargUnwrapper(varargFunc) { i, init -> LongArray(i, init::apply) }, schemaFor<R>())
                 .let { if (nondeterministic) it.asNondeterministic() else it }
                 .let { if (typeOf<R>().isMarkedNullable) it else it.asNonNullable() },
-            encoder = encoder<R>(),
+            encoder = kotlinEncoderFor<R>(),
         )
     }
 }
@@ -931,10 +931,10 @@ inline fun <reified R> udf(
 
     return withAllowUntypedScalaUDF {
         UserDefinedFunctionVararg(
-            udf = functions.udf(VarargUnwrapper(varargFunc) { i, init -> FloatArray(i, init::call) }, schema(typeOf<R>()).unWrap())
+            udf = functions.udf(VarargUnwrapper(varargFunc) { i, init -> FloatArray(i, init::apply) }, schemaFor<R>())
                 .let { if (nondeterministic) it.asNondeterministic() else it }
                 .let { if (typeOf<R>().isMarkedNullable) it else it.asNonNullable() },
-            encoder = encoder<R>(),
+            encoder = kotlinEncoderFor<R>(),
         )
     }
 }
@@ -1130,10 +1130,10 @@ inline fun <reified R> udf(
 
     return withAllowUntypedScalaUDF {
         UserDefinedFunctionVararg(
-            udf = functions.udf(VarargUnwrapper(varargFunc) { i, init -> DoubleArray(i, init::call) }, schema(typeOf<R>()).unWrap())
+            udf = functions.udf(VarargUnwrapper(varargFunc) { i, init -> DoubleArray(i, init::apply) }, schemaFor<R>())
                 .let { if (nondeterministic) it.asNondeterministic() else it }
                 .let { if (typeOf<R>().isMarkedNullable) it else it.asNonNullable() },
-            encoder = encoder<R>(),
+            encoder = kotlinEncoderFor<R>(),
         )
     }
 }
@@ -1325,14 +1325,12 @@ inline fun <reified R> udf(
     nondeterministic: Boolean = false,
     varargFunc: UDF1<BooleanArray, R>,
 ): UserDefinedFunctionVararg<Boolean, R> {
-
-
     return withAllowUntypedScalaUDF {
         UserDefinedFunctionVararg(
-            udf = functions.udf(VarargUnwrapper(varargFunc) { i, init -> BooleanArray(i, init::call) }, schema(typeOf<R>()).unWrap())
+            udf = functions.udf(VarargUnwrapper(varargFunc) { i, init -> BooleanArray(i, init::apply) }, schemaFor<R>())
                 .let { if (nondeterministic) it.asNondeterministic() else it }
                 .let { if (typeOf<R>().isMarkedNullable) it else it.asNonNullable() },
-            encoder = encoder<R>(),
+            encoder = kotlinEncoderFor<R>(),
         )
     }
 }
@@ -1528,10 +1526,10 @@ inline fun <reified T, reified R> udf(
 
     return withAllowUntypedScalaUDF {
         UserDefinedFunctionVararg(
-            udf = functions.udf(VarargUnwrapper(varargFunc) { i, init -> Array<T>(i, init::call) }, schema(typeOf<R>()).unWrap())
+            udf = functions.udf(VarargUnwrapper(varargFunc) { i, init -> Array<T>(i, init::apply) }, schemaFor<R>())
                 .let { if (nondeterministic) it.asNondeterministic() else it }
                 .let { if (typeOf<R>().isMarkedNullable) it else it.asNonNullable() },
-            encoder = encoder<R>(),
+            encoder = kotlinEncoderFor<R>(),
         )
     }
 }
diff --git a/kotlin-spark-api/src/main/kotlin/org/jetbrains/kotlinx/spark/api/UserDefinedFunctions.kt b/kotlin-spark-api/src/main/kotlin/org/jetbrains/kotlinx/spark/api/UserDefinedFunctions.kt
index c5fa749a..90676974 100644
--- a/kotlin-spark-api/src/main/kotlin/org/jetbrains/kotlinx/spark/api/UserDefinedFunctions.kt
+++ b/kotlin-spark-api/src/main/kotlin/org/jetbrains/kotlinx/spark/api/UserDefinedFunctions.kt
@@ -23,6 +23,7 @@ package org.jetbrains.kotlinx.spark.api
 import org.apache.spark.sql.*
 import org.apache.spark.sql.api.java.*
 import kotlin.reflect.*
+import java.io.Serializable
 import org.apache.spark.sql.expressions.UserDefinedFunction as SparkUserDefinedFunction
 
 
@@ -194,7 +195,7 @@ inline fun <reified R> UDFRegistration.register(
 
 
 /** Kotlin wrapper around UDF interface to ensure nullability in types. */
-fun interface UDF0<R> : org.apache.spark.sql.api.java.UDF0<R> { override fun call(): R }
+fun interface UDF0<R> : Serializable, org.apache.spark.sql.api.java.UDF0<R> { override fun call(): R }
 
 /**
  * Defines a named UDF ([NamedUserDefinedFunction0]) instance based on the (lambda) function [func].
@@ -227,10 +228,10 @@ inline fun <reified R> udf(
 
 
     return UserDefinedFunction0(
-        udf = functions.udf(func, schema(typeOf<R>()).unWrap())
+        udf = functions.udf(func, schemaFor<R>())
             .let { if (nondeterministic) it.asNondeterministic() else it }
             .let { if (typeOf<R>().isMarkedNullable) it else it.asNonNullable() },
-        encoder = encoder<R>(),
+        encoder = kotlinEncoderFor<R>(),
     )
 }
 
@@ -421,7 +422,10 @@ inline fun <reified T1, reified R> UDFRegistration.register(
 
 
 /** Kotlin wrapper around UDF interface to ensure nullability in types. */
-fun interface UDF1<T1, R> : org.apache.spark.sql.api.java.UDF1<T1, R> { override fun call(t1: T1): R }
+fun interface UDF1<T1, R> : Serializable, org.apache.spark.sql.api.java.UDF1<T1, R>, org.jetbrains.kotlinx.spark.extensions.VarargUnwrapperUDT1<T1, R> {
+    override fun call(t1: T1): R
+    override fun apply(t1: T1): R = call(t1)
+}
 
 /**
  * Defines a named UDF ([NamedUserDefinedFunction1]) instance based on the (lambda) function [func].
@@ -454,10 +458,10 @@ inline fun <reified T1, reified R> udf(
     T1::class.checkForValidType("T1")
 
     return UserDefinedFunction1(
-        udf = functions.udf(func, schema(typeOf<R>()).unWrap())
+        udf = functions.udf(func, schemaFor<R>())
             .let { if (nondeterministic) it.asNondeterministic() else it }
             .let { if (typeOf<R>().isMarkedNullable) it else it.asNonNullable() },
-        encoder = encoder<R>(),
+        encoder = kotlinEncoderFor<R>(),
     )
 }
 
@@ -646,7 +650,10 @@ inline fun <reified T1, reified T2, reified R> UDFRegistration.register(
 
 
 /** Kotlin wrapper around UDF interface to ensure nullability in types. */
-fun interface UDF2<T1, T2, R> : org.apache.spark.sql.api.java.UDF2<T1, T2, R> { override fun call(t1: T1, t2: T2): R }
+fun interface UDF2<T1, T2, R> : Serializable, org.apache.spark.sql.api.java.UDF2<T1, T2, R>, org.jetbrains.kotlinx.spark.extensions.VarargUnwrapperUDT2<T1, T2, R> {
+    override fun call(t1: T1, t2: T2): R
+    override fun apply(t1: T1, t2: T2): R = call(t1, t2)
+}
 
 /**
  * Defines a named UDF ([NamedUserDefinedFunction2]) instance based on the (lambda) function [func].
@@ -680,10 +687,10 @@ inline fun <reified T1, reified T2, reified R> udf(
     T2::class.checkForValidType("T2")
 
     return UserDefinedFunction2(
-        udf = functions.udf(func, schema(typeOf<R>()).unWrap())
+        udf = functions.udf(func, schemaFor<R>())
             .let { if (nondeterministic) it.asNondeterministic() else it }
             .let { if (typeOf<R>().isMarkedNullable) it else it.asNonNullable() },
-        encoder = encoder<R>(),
+        encoder = kotlinEncoderFor<R>(),
     )
 }
 
@@ -872,7 +879,7 @@ inline fun <reified T1, reified T2, reified T3, reified R> UDFRegistration.regis
 
 
 /** Kotlin wrapper around UDF interface to ensure nullability in types. */
-fun interface UDF3<T1, T2, T3, R> : org.apache.spark.sql.api.java.UDF3<T1, T2, T3, R> { override fun call(t1: T1, t2: T2, t3: T3): R }
+fun interface UDF3<T1, T2, T3, R> : Serializable, org.apache.spark.sql.api.java.UDF3<T1, T2, T3, R> { override fun call(t1: T1, t2: T2, t3: T3): R }
 
 /**
  * Defines a named UDF ([NamedUserDefinedFunction3]) instance based on the (lambda) function [func].
@@ -907,10 +914,10 @@ inline fun <reified T1, reified T2, reified T3, reified R> udf(
     T3::class.checkForValidType("T3")
 
     return UserDefinedFunction3(
-        udf = functions.udf(func, schema(typeOf<R>()).unWrap())
+        udf = functions.udf(func, schemaFor<R>())
             .let { if (nondeterministic) it.asNondeterministic() else it }
             .let { if (typeOf<R>().isMarkedNullable) it else it.asNonNullable() },
-        encoder = encoder<R>(),
+        encoder = kotlinEncoderFor<R>(),
     )
 }
 
@@ -1099,7 +1106,7 @@ inline fun <reified T1, reified T2, reified T3, reified T4, reified R> UDFRegist
 
 
 /** Kotlin wrapper around UDF interface to ensure nullability in types. */
-fun interface UDF4<T1, T2, T3, T4, R> : org.apache.spark.sql.api.java.UDF4<T1, T2, T3, T4, R> { override fun call(t1: T1, t2: T2, t3: T3, t4: T4): R }
+fun interface UDF4<T1, T2, T3, T4, R> : Serializable, org.apache.spark.sql.api.java.UDF4<T1, T2, T3, T4, R> { override fun call(t1: T1, t2: T2, t3: T3, t4: T4): R }
 
 /**
  * Defines a named UDF ([NamedUserDefinedFunction4]) instance based on the (lambda) function [func].
@@ -1135,10 +1142,10 @@ inline fun <reified T1, reified T2, reified T3, reified T4, reified R> udf(
     T4::class.checkForValidType("T4")
 
     return UserDefinedFunction4(
-        udf = functions.udf(func, schema(typeOf<R>()).unWrap())
+        udf = functions.udf(func, schemaFor<R>())
             .let { if (nondeterministic) it.asNondeterministic() else it }
             .let { if (typeOf<R>().isMarkedNullable) it else it.asNonNullable() },
-        encoder = encoder<R>(),
+        encoder = kotlinEncoderFor<R>(),
     )
 }
 
@@ -1327,7 +1334,7 @@ inline fun <reified T1, reified T2, reified T3, reified T4, reified T5, reified
 
 
 /** Kotlin wrapper around UDF interface to ensure nullability in types. */
-fun interface UDF5<T1, T2, T3, T4, T5, R> : org.apache.spark.sql.api.java.UDF5<T1, T2, T3, T4, T5, R> { override fun call(t1: T1, t2: T2, t3: T3, t4: T4, t5: T5): R }
+fun interface UDF5<T1, T2, T3, T4, T5, R> : Serializable, org.apache.spark.sql.api.java.UDF5<T1, T2, T3, T4, T5, R> { override fun call(t1: T1, t2: T2, t3: T3, t4: T4, t5: T5): R }
 
 /**
  * Defines a named UDF ([NamedUserDefinedFunction5]) instance based on the (lambda) function [func].
@@ -1364,10 +1371,10 @@ inline fun <reified T1, reified T2, reified T3, reified T4, reified T5, reified
     T5::class.checkForValidType("T5")
 
     return UserDefinedFunction5(
-        udf = functions.udf(func, schema(typeOf<R>()).unWrap())
+        udf = functions.udf(func, schemaFor<R>())
             .let { if (nondeterministic) it.asNondeterministic() else it }
             .let { if (typeOf<R>().isMarkedNullable) it else it.asNonNullable() },
-        encoder = encoder<R>(),
+        encoder = kotlinEncoderFor<R>(),
     )
 }
 
@@ -1556,7 +1563,7 @@ inline fun <reified T1, reified T2, reified T3, reified T4, reified T5, reified
 
 
 /** Kotlin wrapper around UDF interface to ensure nullability in types. */
-fun interface UDF6<T1, T2, T3, T4, T5, T6, R> : org.apache.spark.sql.api.java.UDF6<T1, T2, T3, T4, T5, T6, R> { override fun call(t1: T1, t2: T2, t3: T3, t4: T4, t5: T5, t6: T6): R }
+fun interface UDF6<T1, T2, T3, T4, T5, T6, R> : Serializable, org.apache.spark.sql.api.java.UDF6<T1, T2, T3, T4, T5, T6, R> { override fun call(t1: T1, t2: T2, t3: T3, t4: T4, t5: T5, t6: T6): R }
 
 /**
  * Defines a named UDF ([NamedUserDefinedFunction6]) instance based on the (lambda) function [func].
@@ -1594,10 +1601,10 @@ inline fun <reified T1, reified T2, reified T3, reified T4, reified T5, reified
     T6::class.checkForValidType("T6")
 
     return UserDefinedFunction6(
-        udf = functions.udf(func, schema(typeOf<R>()).unWrap())
+        udf = functions.udf(func, schemaFor<R>())
             .let { if (nondeterministic) it.asNondeterministic() else it }
             .let { if (typeOf<R>().isMarkedNullable) it else it.asNonNullable() },
-        encoder = encoder<R>(),
+        encoder = kotlinEncoderFor<R>(),
     )
 }
 
@@ -1786,7 +1793,7 @@ inline fun <reified T1, reified T2, reified T3, reified T4, reified T5, reified
 
 
 /** Kotlin wrapper around UDF interface to ensure nullability in types. */
-fun interface UDF7<T1, T2, T3, T4, T5, T6, T7, R> : org.apache.spark.sql.api.java.UDF7<T1, T2, T3, T4, T5, T6, T7, R> { override fun call(t1: T1, t2: T2, t3: T3, t4: T4, t5: T5, t6: T6, t7: T7): R }
+fun interface UDF7<T1, T2, T3, T4, T5, T6, T7, R> : Serializable, org.apache.spark.sql.api.java.UDF7<T1, T2, T3, T4, T5, T6, T7, R> { override fun call(t1: T1, t2: T2, t3: T3, t4: T4, t5: T5, t6: T6, t7: T7): R }
 
 /**
  * Defines a named UDF ([NamedUserDefinedFunction7]) instance based on the (lambda) function [func].
@@ -1825,10 +1832,10 @@ inline fun <reified T1, reified T2, reified T3, reified T4, reified T5, reified
     T7::class.checkForValidType("T7")
 
     return UserDefinedFunction7(
-        udf = functions.udf(func, schema(typeOf<R>()).unWrap())
+        udf = functions.udf(func, schemaFor<R>())
             .let { if (nondeterministic) it.asNondeterministic() else it }
             .let { if (typeOf<R>().isMarkedNullable) it else it.asNonNullable() },
-        encoder = encoder<R>(),
+        encoder = kotlinEncoderFor<R>(),
     )
 }
 
@@ -2017,7 +2024,7 @@ inline fun <reified T1, reified T2, reified T3, reified T4, reified T5, reified
 
 
 /** Kotlin wrapper around UDF interface to ensure nullability in types. */
-fun interface UDF8<T1, T2, T3, T4, T5, T6, T7, T8, R> : org.apache.spark.sql.api.java.UDF8<T1, T2, T3, T4, T5, T6, T7, T8, R> { override fun call(t1: T1, t2: T2, t3: T3, t4: T4, t5: T5, t6: T6, t7: T7, t8: T8): R }
+fun interface UDF8<T1, T2, T3, T4, T5, T6, T7, T8, R> : Serializable, org.apache.spark.sql.api.java.UDF8<T1, T2, T3, T4, T5, T6, T7, T8, R> { override fun call(t1: T1, t2: T2, t3: T3, t4: T4, t5: T5, t6: T6, t7: T7, t8: T8): R }
 
 /**
  * Defines a named UDF ([NamedUserDefinedFunction8]) instance based on the (lambda) function [func].
@@ -2057,10 +2064,10 @@ inline fun <reified T1, reified T2, reified T3, reified T4, reified T5, reified
     T8::class.checkForValidType("T8")
 
     return UserDefinedFunction8(
-        udf = functions.udf(func, schema(typeOf<R>()).unWrap())
+        udf = functions.udf(func, schemaFor<R>())
             .let { if (nondeterministic) it.asNondeterministic() else it }
             .let { if (typeOf<R>().isMarkedNullable) it else it.asNonNullable() },
-        encoder = encoder<R>(),
+        encoder = kotlinEncoderFor<R>(),
     )
 }
 
@@ -2249,7 +2256,7 @@ inline fun <reified T1, reified T2, reified T3, reified T4, reified T5, reified
 
 
 /** Kotlin wrapper around UDF interface to ensure nullability in types. */
-fun interface UDF9<T1, T2, T3, T4, T5, T6, T7, T8, T9, R> : org.apache.spark.sql.api.java.UDF9<T1, T2, T3, T4, T5, T6, T7, T8, T9, R> { override fun call(t1: T1, t2: T2, t3: T3, t4: T4, t5: T5, t6: T6, t7: T7, t8: T8, t9: T9): R }
+fun interface UDF9<T1, T2, T3, T4, T5, T6, T7, T8, T9, R> : Serializable, org.apache.spark.sql.api.java.UDF9<T1, T2, T3, T4, T5, T6, T7, T8, T9, R> { override fun call(t1: T1, t2: T2, t3: T3, t4: T4, t5: T5, t6: T6, t7: T7, t8: T8, t9: T9): R }
 
 /**
  * Defines a named UDF ([NamedUserDefinedFunction9]) instance based on the (lambda) function [func].
@@ -2290,10 +2297,10 @@ inline fun <reified T1, reified T2, reified T3, reified T4, reified T5, reified
     T9::class.checkForValidType("T9")
 
     return UserDefinedFunction9(
-        udf = functions.udf(func, schema(typeOf<R>()).unWrap())
+        udf = functions.udf(func, schemaFor<R>())
             .let { if (nondeterministic) it.asNondeterministic() else it }
             .let { if (typeOf<R>().isMarkedNullable) it else it.asNonNullable() },
-        encoder = encoder<R>(),
+        encoder = kotlinEncoderFor<R>(),
     )
 }
 
@@ -2482,7 +2489,7 @@ inline fun <reified T1, reified T2, reified T3, reified T4, reified T5, reified
 
 
 /** Kotlin wrapper around UDF interface to ensure nullability in types. */
-fun interface UDF10<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, R> : org.apache.spark.sql.api.java.UDF10<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, R> { override fun call(t1: T1, t2: T2, t3: T3, t4: T4, t5: T5, t6: T6, t7: T7, t8: T8, t9: T9, t10: T10): R }
+fun interface UDF10<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, R> : Serializable, org.apache.spark.sql.api.java.UDF10<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, R> { override fun call(t1: T1, t2: T2, t3: T3, t4: T4, t5: T5, t6: T6, t7: T7, t8: T8, t9: T9, t10: T10): R }
 
 /**
  * Defines a named UDF ([NamedUserDefinedFunction10]) instance based on the (lambda) function [func].
@@ -2524,10 +2531,10 @@ inline fun <reified T1, reified T2, reified T3, reified T4, reified T5, reified
     T10::class.checkForValidType("T10")
 
     return UserDefinedFunction10(
-        udf = functions.udf(func, schema(typeOf<R>()).unWrap())
+        udf = functions.udf(func, schemaFor<R>())
             .let { if (nondeterministic) it.asNondeterministic() else it }
             .let { if (typeOf<R>().isMarkedNullable) it else it.asNonNullable() },
-        encoder = encoder<R>(),
+        encoder = kotlinEncoderFor<R>(),
     )
 }
 
@@ -2716,7 +2723,7 @@ inline fun <reified T1, reified T2, reified T3, reified T4, reified T5, reified
 
 
 /** Kotlin wrapper around UDF interface to ensure nullability in types. */
-fun interface UDF11<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, R> : org.apache.spark.sql.api.java.UDF11<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, R> { override fun call(t1: T1, t2: T2, t3: T3, t4: T4, t5: T5, t6: T6, t7: T7, t8: T8, t9: T9, t10: T10, t11: T11): R }
+fun interface UDF11<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, R> : Serializable, org.apache.spark.sql.api.java.UDF11<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, R> { override fun call(t1: T1, t2: T2, t3: T3, t4: T4, t5: T5, t6: T6, t7: T7, t8: T8, t9: T9, t10: T10, t11: T11): R }
 
 /**
  * Defines a named UDF ([NamedUserDefinedFunction11]) instance based on the (lambda) function [func].
@@ -2759,10 +2766,10 @@ inline fun <reified T1, reified T2, reified T3, reified T4, reified T5, reified
     T11::class.checkForValidType("T11")
 
     return UserDefinedFunction11(
-        udf = functions.udf(func, schema(typeOf<R>()).unWrap())
+        udf = functions.udf(func, schemaFor<R>())
             .let { if (nondeterministic) it.asNondeterministic() else it }
             .let { if (typeOf<R>().isMarkedNullable) it else it.asNonNullable() },
-        encoder = encoder<R>(),
+        encoder = kotlinEncoderFor<R>(),
     )
 }
 
@@ -2951,7 +2958,7 @@ inline fun <reified T1, reified T2, reified T3, reified T4, reified T5, reified
 
 
 /** Kotlin wrapper around UDF interface to ensure nullability in types. */
-fun interface UDF12<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, R> : org.apache.spark.sql.api.java.UDF12<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, R> { override fun call(t1: T1, t2: T2, t3: T3, t4: T4, t5: T5, t6: T6, t7: T7, t8: T8, t9: T9, t10: T10, t11: T11, t12: T12): R }
+fun interface UDF12<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, R> : Serializable, org.apache.spark.sql.api.java.UDF12<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, R> { override fun call(t1: T1, t2: T2, t3: T3, t4: T4, t5: T5, t6: T6, t7: T7, t8: T8, t9: T9, t10: T10, t11: T11, t12: T12): R }
 
 /**
  * Defines a named UDF ([NamedUserDefinedFunction12]) instance based on the (lambda) function [func].
@@ -2995,10 +3002,10 @@ inline fun <reified T1, reified T2, reified T3, reified T4, reified T5, reified
     T12::class.checkForValidType("T12")
 
     return UserDefinedFunction12(
-        udf = functions.udf(func, schema(typeOf<R>()).unWrap())
+        udf = functions.udf(func, schemaFor<R>())
             .let { if (nondeterministic) it.asNondeterministic() else it }
             .let { if (typeOf<R>().isMarkedNullable) it else it.asNonNullable() },
-        encoder = encoder<R>(),
+        encoder = kotlinEncoderFor<R>(),
     )
 }
 
@@ -3187,7 +3194,7 @@ inline fun <reified T1, reified T2, reified T3, reified T4, reified T5, reified
 
 
 /** Kotlin wrapper around UDF interface to ensure nullability in types. */
-fun interface UDF13<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, R> : org.apache.spark.sql.api.java.UDF13<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, R> { override fun call(t1: T1, t2: T2, t3: T3, t4: T4, t5: T5, t6: T6, t7: T7, t8: T8, t9: T9, t10: T10, t11: T11, t12: T12, t13: T13): R }
+fun interface UDF13<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, R> : Serializable, org.apache.spark.sql.api.java.UDF13<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, R> { override fun call(t1: T1, t2: T2, t3: T3, t4: T4, t5: T5, t6: T6, t7: T7, t8: T8, t9: T9, t10: T10, t11: T11, t12: T12, t13: T13): R }
 
 /**
  * Defines a named UDF ([NamedUserDefinedFunction13]) instance based on the (lambda) function [func].
@@ -3232,10 +3239,10 @@ inline fun <reified T1, reified T2, reified T3, reified T4, reified T5, reified
     T13::class.checkForValidType("T13")
 
     return UserDefinedFunction13(
-        udf = functions.udf(func, schema(typeOf<R>()).unWrap())
+        udf = functions.udf(func, schemaFor<R>())
             .let { if (nondeterministic) it.asNondeterministic() else it }
             .let { if (typeOf<R>().isMarkedNullable) it else it.asNonNullable() },
-        encoder = encoder<R>(),
+        encoder = kotlinEncoderFor<R>(),
     )
 }
 
@@ -3424,7 +3431,7 @@ inline fun <reified T1, reified T2, reified T3, reified T4, reified T5, reified
 
 
 /** Kotlin wrapper around UDF interface to ensure nullability in types. */
-fun interface UDF14<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, R> : org.apache.spark.sql.api.java.UDF14<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, R> { override fun call(t1: T1, t2: T2, t3: T3, t4: T4, t5: T5, t6: T6, t7: T7, t8: T8, t9: T9, t10: T10, t11: T11, t12: T12, t13: T13, t14: T14): R }
+fun interface UDF14<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, R> : Serializable, org.apache.spark.sql.api.java.UDF14<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, R> { override fun call(t1: T1, t2: T2, t3: T3, t4: T4, t5: T5, t6: T6, t7: T7, t8: T8, t9: T9, t10: T10, t11: T11, t12: T12, t13: T13, t14: T14): R }
 
 /**
  * Defines a named UDF ([NamedUserDefinedFunction14]) instance based on the (lambda) function [func].
@@ -3470,10 +3477,10 @@ inline fun <reified T1, reified T2, reified T3, reified T4, reified T5, reified
     T14::class.checkForValidType("T14")
 
     return UserDefinedFunction14(
-        udf = functions.udf(func, schema(typeOf<R>()).unWrap())
+        udf = functions.udf(func, schemaFor<R>())
             .let { if (nondeterministic) it.asNondeterministic() else it }
             .let { if (typeOf<R>().isMarkedNullable) it else it.asNonNullable() },
-        encoder = encoder<R>(),
+        encoder = kotlinEncoderFor<R>(),
     )
 }
 
@@ -3662,7 +3669,7 @@ inline fun <reified T1, reified T2, reified T3, reified T4, reified T5, reified
 
 
 /** Kotlin wrapper around UDF interface to ensure nullability in types. */
-fun interface UDF15<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, R> : org.apache.spark.sql.api.java.UDF15<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, R> { override fun call(t1: T1, t2: T2, t3: T3, t4: T4, t5: T5, t6: T6, t7: T7, t8: T8, t9: T9, t10: T10, t11: T11, t12: T12, t13: T13, t14: T14, t15: T15): R }
+fun interface UDF15<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, R> : Serializable, org.apache.spark.sql.api.java.UDF15<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, R> { override fun call(t1: T1, t2: T2, t3: T3, t4: T4, t5: T5, t6: T6, t7: T7, t8: T8, t9: T9, t10: T10, t11: T11, t12: T12, t13: T13, t14: T14, t15: T15): R }
 
 /**
  * Defines a named UDF ([NamedUserDefinedFunction15]) instance based on the (lambda) function [func].
@@ -3709,10 +3716,10 @@ inline fun <reified T1, reified T2, reified T3, reified T4, reified T5, reified
     T15::class.checkForValidType("T15")
 
     return UserDefinedFunction15(
-        udf = functions.udf(func, schema(typeOf<R>()).unWrap())
+        udf = functions.udf(func, schemaFor<R>())
             .let { if (nondeterministic) it.asNondeterministic() else it }
             .let { if (typeOf<R>().isMarkedNullable) it else it.asNonNullable() },
-        encoder = encoder<R>(),
+        encoder = kotlinEncoderFor<R>(),
     )
 }
 
@@ -3901,7 +3908,7 @@ inline fun <reified T1, reified T2, reified T3, reified T4, reified T5, reified
 
 
 /** Kotlin wrapper around UDF interface to ensure nullability in types. */
-fun interface UDF16<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, R> : org.apache.spark.sql.api.java.UDF16<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, R> { override fun call(t1: T1, t2: T2, t3: T3, t4: T4, t5: T5, t6: T6, t7: T7, t8: T8, t9: T9, t10: T10, t11: T11, t12: T12, t13: T13, t14: T14, t15: T15, t16: T16): R }
+fun interface UDF16<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, R> : Serializable, org.apache.spark.sql.api.java.UDF16<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, R> { override fun call(t1: T1, t2: T2, t3: T3, t4: T4, t5: T5, t6: T6, t7: T7, t8: T8, t9: T9, t10: T10, t11: T11, t12: T12, t13: T13, t14: T14, t15: T15, t16: T16): R }
 
 /**
  * Defines a named UDF ([NamedUserDefinedFunction16]) instance based on the (lambda) function [func].
@@ -3949,10 +3956,10 @@ inline fun <reified T1, reified T2, reified T3, reified T4, reified T5, reified
     T16::class.checkForValidType("T16")
 
     return UserDefinedFunction16(
-        udf = functions.udf(func, schema(typeOf<R>()).unWrap())
+        udf = functions.udf(func, schemaFor<R>())
             .let { if (nondeterministic) it.asNondeterministic() else it }
             .let { if (typeOf<R>().isMarkedNullable) it else it.asNonNullable() },
-        encoder = encoder<R>(),
+        encoder = kotlinEncoderFor<R>(),
     )
 }
 
@@ -4141,7 +4148,7 @@ inline fun <reified T1, reified T2, reified T3, reified T4, reified T5, reified
 
 
 /** Kotlin wrapper around UDF interface to ensure nullability in types. */
-fun interface UDF17<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, R> : org.apache.spark.sql.api.java.UDF17<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, R> { override fun call(t1: T1, t2: T2, t3: T3, t4: T4, t5: T5, t6: T6, t7: T7, t8: T8, t9: T9, t10: T10, t11: T11, t12: T12, t13: T13, t14: T14, t15: T15, t16: T16, t17: T17): R }
+fun interface UDF17<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, R> : Serializable, org.apache.spark.sql.api.java.UDF17<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, R> { override fun call(t1: T1, t2: T2, t3: T3, t4: T4, t5: T5, t6: T6, t7: T7, t8: T8, t9: T9, t10: T10, t11: T11, t12: T12, t13: T13, t14: T14, t15: T15, t16: T16, t17: T17): R }
 
 /**
  * Defines a named UDF ([NamedUserDefinedFunction17]) instance based on the (lambda) function [func].
@@ -4190,10 +4197,10 @@ inline fun <reified T1, reified T2, reified T3, reified T4, reified T5, reified
     T17::class.checkForValidType("T17")
 
     return UserDefinedFunction17(
-        udf = functions.udf(func, schema(typeOf<R>()).unWrap())
+        udf = functions.udf(func, schemaFor<R>())
             .let { if (nondeterministic) it.asNondeterministic() else it }
             .let { if (typeOf<R>().isMarkedNullable) it else it.asNonNullable() },
-        encoder = encoder<R>(),
+        encoder = kotlinEncoderFor<R>(),
     )
 }
 
@@ -4382,7 +4389,7 @@ inline fun <reified T1, reified T2, reified T3, reified T4, reified T5, reified
 
 
 /** Kotlin wrapper around UDF interface to ensure nullability in types. */
-fun interface UDF18<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, R> : org.apache.spark.sql.api.java.UDF18<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, R> { override fun call(t1: T1, t2: T2, t3: T3, t4: T4, t5: T5, t6: T6, t7: T7, t8: T8, t9: T9, t10: T10, t11: T11, t12: T12, t13: T13, t14: T14, t15: T15, t16: T16, t17: T17, t18: T18): R }
+fun interface UDF18<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, R> : Serializable, org.apache.spark.sql.api.java.UDF18<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, R> { override fun call(t1: T1, t2: T2, t3: T3, t4: T4, t5: T5, t6: T6, t7: T7, t8: T8, t9: T9, t10: T10, t11: T11, t12: T12, t13: T13, t14: T14, t15: T15, t16: T16, t17: T17, t18: T18): R }
 
 /**
  * Defines a named UDF ([NamedUserDefinedFunction18]) instance based on the (lambda) function [func].
@@ -4432,10 +4439,10 @@ inline fun <reified T1, reified T2, reified T3, reified T4, reified T5, reified
     T18::class.checkForValidType("T18")
 
     return UserDefinedFunction18(
-        udf = functions.udf(func, schema(typeOf<R>()).unWrap())
+        udf = functions.udf(func, schemaFor<R>())
             .let { if (nondeterministic) it.asNondeterministic() else it }
             .let { if (typeOf<R>().isMarkedNullable) it else it.asNonNullable() },
-        encoder = encoder<R>(),
+        encoder = kotlinEncoderFor<R>(),
     )
 }
 
@@ -4624,7 +4631,7 @@ inline fun <reified T1, reified T2, reified T3, reified T4, reified T5, reified
 
 
 /** Kotlin wrapper around UDF interface to ensure nullability in types. */
-fun interface UDF19<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, R> : org.apache.spark.sql.api.java.UDF19<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, R> { override fun call(t1: T1, t2: T2, t3: T3, t4: T4, t5: T5, t6: T6, t7: T7, t8: T8, t9: T9, t10: T10, t11: T11, t12: T12, t13: T13, t14: T14, t15: T15, t16: T16, t17: T17, t18: T18, t19: T19): R }
+fun interface UDF19<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, R> : Serializable, org.apache.spark.sql.api.java.UDF19<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, R> { override fun call(t1: T1, t2: T2, t3: T3, t4: T4, t5: T5, t6: T6, t7: T7, t8: T8, t9: T9, t10: T10, t11: T11, t12: T12, t13: T13, t14: T14, t15: T15, t16: T16, t17: T17, t18: T18, t19: T19): R }
 
 /**
  * Defines a named UDF ([NamedUserDefinedFunction19]) instance based on the (lambda) function [func].
@@ -4675,10 +4682,10 @@ inline fun <reified T1, reified T2, reified T3, reified T4, reified T5, reified
     T19::class.checkForValidType("T19")
 
     return UserDefinedFunction19(
-        udf = functions.udf(func, schema(typeOf<R>()).unWrap())
+        udf = functions.udf(func, schemaFor<R>())
             .let { if (nondeterministic) it.asNondeterministic() else it }
             .let { if (typeOf<R>().isMarkedNullable) it else it.asNonNullable() },
-        encoder = encoder<R>(),
+        encoder = kotlinEncoderFor<R>(),
     )
 }
 
@@ -4867,7 +4874,7 @@ inline fun <reified T1, reified T2, reified T3, reified T4, reified T5, reified
 
 
 /** Kotlin wrapper around UDF interface to ensure nullability in types. */
-fun interface UDF20<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, R> : org.apache.spark.sql.api.java.UDF20<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, R> { override fun call(t1: T1, t2: T2, t3: T3, t4: T4, t5: T5, t6: T6, t7: T7, t8: T8, t9: T9, t10: T10, t11: T11, t12: T12, t13: T13, t14: T14, t15: T15, t16: T16, t17: T17, t18: T18, t19: T19, t20: T20): R }
+fun interface UDF20<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, R> : Serializable, org.apache.spark.sql.api.java.UDF20<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, R> { override fun call(t1: T1, t2: T2, t3: T3, t4: T4, t5: T5, t6: T6, t7: T7, t8: T8, t9: T9, t10: T10, t11: T11, t12: T12, t13: T13, t14: T14, t15: T15, t16: T16, t17: T17, t18: T18, t19: T19, t20: T20): R }
 
 /**
  * Defines a named UDF ([NamedUserDefinedFunction20]) instance based on the (lambda) function [func].
@@ -4919,10 +4926,10 @@ inline fun <reified T1, reified T2, reified T3, reified T4, reified T5, reified
     T20::class.checkForValidType("T20")
 
     return UserDefinedFunction20(
-        udf = functions.udf(func, schema(typeOf<R>()).unWrap())
+        udf = functions.udf(func, schemaFor<R>())
             .let { if (nondeterministic) it.asNondeterministic() else it }
             .let { if (typeOf<R>().isMarkedNullable) it else it.asNonNullable() },
-        encoder = encoder<R>(),
+        encoder = kotlinEncoderFor<R>(),
     )
 }
 
@@ -5111,7 +5118,7 @@ inline fun <reified T1, reified T2, reified T3, reified T4, reified T5, reified
 
 
 /** Kotlin wrapper around UDF interface to ensure nullability in types. */
-fun interface UDF21<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, R> : org.apache.spark.sql.api.java.UDF21<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, R> { override fun call(t1: T1, t2: T2, t3: T3, t4: T4, t5: T5, t6: T6, t7: T7, t8: T8, t9: T9, t10: T10, t11: T11, t12: T12, t13: T13, t14: T14, t15: T15, t16: T16, t17: T17, t18: T18, t19: T19, t20: T20, t21: T21): R }
+fun interface UDF21<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, R> : Serializable, org.apache.spark.sql.api.java.UDF21<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, R> { override fun call(t1: T1, t2: T2, t3: T3, t4: T4, t5: T5, t6: T6, t7: T7, t8: T8, t9: T9, t10: T10, t11: T11, t12: T12, t13: T13, t14: T14, t15: T15, t16: T16, t17: T17, t18: T18, t19: T19, t20: T20, t21: T21): R }
 
 /**
  * Defines a named UDF ([NamedUserDefinedFunction21]) instance based on the (lambda) function [func].
@@ -5164,10 +5171,10 @@ inline fun <reified T1, reified T2, reified T3, reified T4, reified T5, reified
     T21::class.checkForValidType("T21")
 
     return UserDefinedFunction21(
-        udf = functions.udf(func, schema(typeOf<R>()).unWrap())
+        udf = functions.udf(func, schemaFor<R>())
             .let { if (nondeterministic) it.asNondeterministic() else it }
             .let { if (typeOf<R>().isMarkedNullable) it else it.asNonNullable() },
-        encoder = encoder<R>(),
+        encoder = kotlinEncoderFor<R>(),
     )
 }
 
@@ -5356,7 +5363,7 @@ inline fun <reified T1, reified T2, reified T3, reified T4, reified T5, reified
 
 
 /** Kotlin wrapper around UDF interface to ensure nullability in types. */
-fun interface UDF22<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, R> : org.apache.spark.sql.api.java.UDF22<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, R> { override fun call(t1: T1, t2: T2, t3: T3, t4: T4, t5: T5, t6: T6, t7: T7, t8: T8, t9: T9, t10: T10, t11: T11, t12: T12, t13: T13, t14: T14, t15: T15, t16: T16, t17: T17, t18: T18, t19: T19, t20: T20, t21: T21, t22: T22): R }
+fun interface UDF22<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, R> : Serializable, org.apache.spark.sql.api.java.UDF22<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, R> { override fun call(t1: T1, t2: T2, t3: T3, t4: T4, t5: T5, t6: T6, t7: T7, t8: T8, t9: T9, t10: T10, t11: T11, t12: T12, t13: T13, t14: T14, t15: T15, t16: T16, t17: T17, t18: T18, t19: T19, t20: T20, t21: T21, t22: T22): R }
 
 /**
  * Defines a named UDF ([NamedUserDefinedFunction22]) instance based on the (lambda) function [func].
@@ -5410,10 +5417,10 @@ inline fun <reified T1, reified T2, reified T3, reified T4, reified T5, reified
     T22::class.checkForValidType("T22")
 
     return UserDefinedFunction22(
-        udf = functions.udf(func, schema(typeOf<R>()).unWrap())
+        udf = functions.udf(func, schemaFor<R>())
             .let { if (nondeterministic) it.asNondeterministic() else it }
             .let { if (typeOf<R>().isMarkedNullable) it else it.asNonNullable() },
-        encoder = encoder<R>(),
+        encoder = kotlinEncoderFor<R>(),
     )
 }
 
diff --git a/kotlin-spark-api/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Utils.kt b/kotlin-spark-api/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Utils.kt
new file mode 100644
index 00000000..3ceaae5a
--- /dev/null
+++ b/kotlin-spark-api/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Utils.kt
@@ -0,0 +1,7 @@
+package org.jetbrains.kotlinx.spark.api
+
+const val KOTLIN_VERSION = /*$"\""+kotlin+"\""$*/ /*-*/ ""
+const val SCALA_VERSION = /*$"\""+scala+"\""$*/ /*-*/ ""
+const val SCALA_COMPAT_VERSION = /*$"\""+scalaCompat+"\""$*/ /*-*/ ""
+const val SPARK_VERSION = /*$"\""+spark+"\""$*/ /*-*/ ""
+const val SPARK_MINOR_VERSION = /*$"\""+sparkMinor+"\""$*/ /*-*/ ""
\ No newline at end of file
diff --git a/kotlin-spark-api/src/main/kotlin/org/jetbrains/kotlinx/spark/api/plugin/annotations/Sparkify.kt b/kotlin-spark-api/src/main/kotlin/org/jetbrains/kotlinx/spark/api/plugin/annotations/Sparkify.kt
new file mode 100644
index 00000000..dfcafe07
--- /dev/null
+++ b/kotlin-spark-api/src/main/kotlin/org/jetbrains/kotlinx/spark/api/plugin/annotations/Sparkify.kt
@@ -0,0 +1,27 @@
+package org.jetbrains.kotlinx.spark.api.plugin.annotations
+
+
+/**
+ * Annotate Data Classes with this annotation
+ * to make them encodable by Spark.
+ *
+ * This requires the Gradle Plugin "org.jetbrains.kotlinx.spark.plugin.gradle-plugin"
+ * to be enabled for your project.
+ *
+ * In practice, this annotation will generate `@get:JvmName("propertyName")`
+ * for each argument in the primary constructor. This will satisfy the Spark Property
+ * encoder with the expectation of there being a "propertyName()" getter-function for each property.
+ *
+ * See [ColumnName] for custom column names.
+ */
+@Target(AnnotationTarget.CLASS)
+annotation class Sparkify
+
+/**
+ * Requires the data class to have the [@Sparkify][Sparkify] annotation!
+ *
+ * Annotate the primary constructor arguments with this annotation to
+ * specify a custom column name for the Spark Dataset.
+ */
+@Target(AnnotationTarget.VALUE_PARAMETER)
+annotation class ColumnName(val name: String)
diff --git a/kotlin-spark-api/src/main/kotlin/org/jetbrains/kotlinx/spark/api/udts/DatePeriodUdt.kt b/kotlin-spark-api/src/main/kotlin/org/jetbrains/kotlinx/spark/api/udts/DatePeriodUdt.kt
new file mode 100644
index 00000000..3705cb5a
--- /dev/null
+++ b/kotlin-spark-api/src/main/kotlin/org/jetbrains/kotlinx/spark/api/udts/DatePeriodUdt.kt
@@ -0,0 +1,27 @@
+package org.jetbrains.kotlinx.spark.api.udts
+
+import kotlinx.datetime.DatePeriod
+import kotlinx.datetime.toJavaPeriod
+import kotlinx.datetime.toKotlinDatePeriod
+import org.apache.spark.sql.catalyst.util.IntervalUtils
+import org.apache.spark.sql.types.UserDefinedType
+import org.apache.spark.sql.types.YearMonthIntervalType
+
+/**
+ * NOTE: Just like java.time.DatePeriod, this is truncated to months.
+ */
+class DatePeriodUdt : UserDefinedType<DatePeriod>() {
+
+    override fun userClass(): Class<DatePeriod> = DatePeriod::class.java
+    override fun deserialize(datum: Any?): DatePeriod? =
+        when (datum) {
+            null -> null
+            is Int -> IntervalUtils.monthsToPeriod(datum).toKotlinDatePeriod()
+            else -> throw IllegalArgumentException("Unsupported datum: $datum")
+        }
+
+    override fun serialize(obj: DatePeriod?): Int? =
+        obj?.let { IntervalUtils.periodToMonths(it.toJavaPeriod()) }
+
+    override fun sqlType(): YearMonthIntervalType = YearMonthIntervalType.apply()
+}
\ No newline at end of file
diff --git a/kotlin-spark-api/src/main/kotlin/org/jetbrains/kotlinx/spark/api/udts/DateTimePeriodUdt.kt b/kotlin-spark-api/src/main/kotlin/org/jetbrains/kotlinx/spark/api/udts/DateTimePeriodUdt.kt
new file mode 100644
index 00000000..3b939cf9
--- /dev/null
+++ b/kotlin-spark-api/src/main/kotlin/org/jetbrains/kotlinx/spark/api/udts/DateTimePeriodUdt.kt
@@ -0,0 +1,46 @@
+package org.jetbrains.kotlinx.spark.api.udts
+
+import kotlinx.datetime.DateTimePeriod
+import org.apache.spark.sql.types.CalendarIntervalType
+import org.apache.spark.sql.types.`CalendarIntervalType$`
+import org.apache.spark.sql.types.UserDefinedType
+import org.apache.spark.unsafe.types.CalendarInterval
+import kotlin.time.Duration.Companion.hours
+import kotlin.time.Duration.Companion.minutes
+import kotlin.time.Duration.Companion.nanoseconds
+import kotlin.time.Duration.Companion.seconds
+
+/**
+ * NOTE: Just like java.time.DatePeriod, this is truncated to months.
+ */
+class DateTimePeriodUdt : UserDefinedType<DateTimePeriod>() {
+
+    override fun userClass(): Class<DateTimePeriod> = DateTimePeriod::class.java
+    override fun deserialize(datum: Any?): DateTimePeriod? =
+        when (datum) {
+            null -> null
+            is CalendarInterval ->
+                DateTimePeriod(
+                    months = datum.months,
+                    days = datum.days,
+                    nanoseconds = datum.microseconds * 1_000,
+                )
+
+            else -> throw IllegalArgumentException("Unsupported datum: $datum")
+        }
+
+    override fun serialize(obj: DateTimePeriod?): CalendarInterval? =
+        obj?.let {
+            CalendarInterval(
+                /* months = */ obj.months + obj.years * 12,
+                /* days = */ obj.days,
+                /* microseconds = */
+                (obj.hours.hours +
+                        obj.minutes.minutes +
+                        obj.seconds.seconds +
+                        obj.nanoseconds.nanoseconds).inWholeMicroseconds,
+            )
+        }
+
+    override fun sqlType(): CalendarIntervalType = `CalendarIntervalType$`.`MODULE$`
+}
\ No newline at end of file
diff --git a/kotlin-spark-api/src/main/kotlin/org/jetbrains/kotlinx/spark/api/udts/DurationUdt.kt b/kotlin-spark-api/src/main/kotlin/org/jetbrains/kotlinx/spark/api/udts/DurationUdt.kt
new file mode 100644
index 00000000..ff1e5df4
--- /dev/null
+++ b/kotlin-spark-api/src/main/kotlin/org/jetbrains/kotlinx/spark/api/udts/DurationUdt.kt
@@ -0,0 +1,46 @@
+package org.jetbrains.kotlinx.spark.api.udts
+
+import org.apache.spark.sql.catalyst.util.IntervalUtils
+import org.apache.spark.sql.types.DataType
+import org.apache.spark.sql.types.DayTimeIntervalType
+import org.apache.spark.sql.types.UserDefinedType
+import kotlin.time.Duration
+import kotlin.time.Duration.Companion.milliseconds
+import kotlin.time.Duration.Companion.nanoseconds
+import kotlin.time.toJavaDuration
+import kotlin.time.toKotlinDuration
+
+// TODO Fails, likely because Duration is a value class.
+class DurationUdt : UserDefinedType<Duration>() {
+
+    override fun userClass(): Class<Duration> = Duration::class.java
+    override fun deserialize(datum: Any?): Duration? =
+        when (datum) {
+            null -> null
+            is Long -> IntervalUtils.microsToDuration(datum).toKotlinDuration()
+//            is Long -> IntervalUtils.microsToDuration(datum).toKotlinDuration().let {
+//                // store in nanos
+//                it.inWholeNanoseconds shl 1
+//            }
+            else -> throw IllegalArgumentException("Unsupported datum: $datum")
+        }
+
+//    override fun serialize(obj: Duration): Long =
+//        IntervalUtils.durationToMicros(obj.toJavaDuration())
+
+    fun serialize(obj: Long): Long? =
+        obj?.let { rawValue ->
+            val unitDiscriminator = rawValue.toInt() and 1
+            fun isInNanos() = unitDiscriminator == 0
+            val value = rawValue shr 1
+            val duration = if (isInNanos()) value.nanoseconds else value.milliseconds
+
+            IntervalUtils.durationToMicros(duration.toJavaDuration())
+        }
+
+    override fun serialize(obj: Duration): Long? =
+        obj?.let { IntervalUtils.durationToMicros(it.toJavaDuration()) }
+
+
+    override fun sqlType(): DataType = DayTimeIntervalType.apply()
+}
\ No newline at end of file
diff --git a/kotlin-spark-api/src/main/kotlin/org/jetbrains/kotlinx/spark/api/udts/InstantUdt.kt b/kotlin-spark-api/src/main/kotlin/org/jetbrains/kotlinx/spark/api/udts/InstantUdt.kt
new file mode 100644
index 00000000..7b8ba110
--- /dev/null
+++ b/kotlin-spark-api/src/main/kotlin/org/jetbrains/kotlinx/spark/api/udts/InstantUdt.kt
@@ -0,0 +1,26 @@
+package org.jetbrains.kotlinx.spark.api.udts
+
+import kotlinx.datetime.Instant
+import kotlinx.datetime.toJavaInstant
+import kotlinx.datetime.toKotlinInstant
+import org.apache.spark.sql.catalyst.util.DateTimeUtils
+import org.apache.spark.sql.types.DataType
+import org.apache.spark.sql.types.`TimestampType$`
+import org.apache.spark.sql.types.UserDefinedType
+
+
+class InstantUdt : UserDefinedType<Instant>() {
+
+    override fun userClass(): Class<Instant> = Instant::class.java
+    override fun deserialize(datum: Any?): Instant? =
+        when (datum) {
+            null -> null
+            is Long -> DateTimeUtils.microsToInstant(datum).toKotlinInstant()
+            else -> throw IllegalArgumentException("Unsupported datum: $datum")
+        }
+
+    override fun serialize(obj: Instant?): Long? =
+        obj?.let { DateTimeUtils.instantToMicros(it.toJavaInstant()) }
+
+    override fun sqlType(): DataType = `TimestampType$`.`MODULE$`
+}
\ No newline at end of file
diff --git a/kotlin-spark-api/src/main/kotlin/org/jetbrains/kotlinx/spark/api/udts/LocalDateTimeUdt.kt b/kotlin-spark-api/src/main/kotlin/org/jetbrains/kotlinx/spark/api/udts/LocalDateTimeUdt.kt
new file mode 100644
index 00000000..7dd4fa0d
--- /dev/null
+++ b/kotlin-spark-api/src/main/kotlin/org/jetbrains/kotlinx/spark/api/udts/LocalDateTimeUdt.kt
@@ -0,0 +1,26 @@
+package org.jetbrains.kotlinx.spark.api.udts
+
+import kotlinx.datetime.LocalDateTime
+import kotlinx.datetime.toJavaLocalDateTime
+import kotlinx.datetime.toKotlinLocalDateTime
+import org.apache.spark.sql.catalyst.util.DateTimeUtils
+import org.apache.spark.sql.types.DataType
+import org.apache.spark.sql.types.`TimestampNTZType$`
+import org.apache.spark.sql.types.UserDefinedType
+
+
+class LocalDateTimeUdt : UserDefinedType<LocalDateTime>() {
+
+    override fun userClass(): Class<LocalDateTime> = LocalDateTime::class.java
+    override fun deserialize(datum: Any?): LocalDateTime? =
+        when (datum) {
+            null -> null
+            is Long -> DateTimeUtils.microsToLocalDateTime(datum).toKotlinLocalDateTime()
+            else -> throw IllegalArgumentException("Unsupported datum: $datum")
+        }
+
+    override fun serialize(obj: LocalDateTime?): Long? =
+        obj?.let { DateTimeUtils.localDateTimeToMicros(it.toJavaLocalDateTime()) }
+
+    override fun sqlType(): DataType = `TimestampNTZType$`.`MODULE$`
+}
\ No newline at end of file
diff --git a/kotlin-spark-api/src/main/kotlin/org/jetbrains/kotlinx/spark/api/udts/LocalDateUdt.kt b/kotlin-spark-api/src/main/kotlin/org/jetbrains/kotlinx/spark/api/udts/LocalDateUdt.kt
new file mode 100644
index 00000000..033b05e5
--- /dev/null
+++ b/kotlin-spark-api/src/main/kotlin/org/jetbrains/kotlinx/spark/api/udts/LocalDateUdt.kt
@@ -0,0 +1,26 @@
+package org.jetbrains.kotlinx.spark.api.udts
+
+import kotlinx.datetime.LocalDate
+import kotlinx.datetime.toJavaLocalDate
+import kotlinx.datetime.toKotlinLocalDate
+import org.apache.spark.sql.catalyst.util.DateTimeUtils
+import org.apache.spark.sql.types.DataType
+import org.apache.spark.sql.types.`DateType$`
+import org.apache.spark.sql.types.UserDefinedType
+
+
+class LocalDateUdt : UserDefinedType<LocalDate>() {
+
+    override fun userClass(): Class<LocalDate> = LocalDate::class.java
+    override fun deserialize(datum: Any?): LocalDate? =
+        when (datum) {
+            null -> null
+            is Int -> DateTimeUtils.daysToLocalDate(datum).toKotlinLocalDate()
+            else -> throw IllegalArgumentException("Unsupported datum: $datum")
+        }
+
+    override fun serialize(obj: LocalDate?): Int? =
+        obj?.let { DateTimeUtils.localDateToDays(it.toJavaLocalDate()) }
+
+    override fun sqlType(): DataType = `DateType$`.`MODULE$`
+}
\ No newline at end of file
diff --git a/kotlin-spark-api/src/test/kotlin/org/jetbrains/kotlinx/spark/api/ApiTest.kt b/kotlin-spark-api/src/test/kotlin/org/jetbrains/kotlinx/spark/api/ApiTest.kt
index 9a7168e5..65a845c7 100644
--- a/kotlin-spark-api/src/test/kotlin/org/jetbrains/kotlinx/spark/api/ApiTest.kt
+++ b/kotlin-spark-api/src/test/kotlin/org/jetbrains/kotlinx/spark/api/ApiTest.kt
@@ -21,6 +21,7 @@ import ch.tutteli.atrium.api.fluent.en_GB.*
 import ch.tutteli.atrium.api.verbs.expect
 import io.kotest.core.spec.style.ShouldSpec
 import io.kotest.matchers.shouldBe
+import org.jetbrains.kotlinx.spark.api.plugin.annotations.Sparkify
 import scala.collection.Seq
 import java.io.Serializable
 import kotlin.collections.Iterator
@@ -34,7 +35,7 @@ class ApiTest : ShouldSpec({
         withSpark(props = mapOf("spark.sql.codegen.comments" to true)) {
 
             should("Create Seqs") {
-                spark.createDataset(seqOf(1, 2, 3), encoder())
+                spark.createDataset(seqOf(1, 2, 3), kotlinEncoderFor())
                     .collectAsList() shouldBe listOf(1, 2, 3)
 
 
@@ -165,4 +166,5 @@ class ApiTest : ShouldSpec({
 
 
 // (data) class must be Serializable to be broadcast
+@Sparkify
 data class SomeClass(val a: IntArray, val b: Int) : Serializable
diff --git a/kotlin-spark-api/src/test/kotlin/org/jetbrains/kotlinx/spark/api/CompilerPluginTest.kt b/kotlin-spark-api/src/test/kotlin/org/jetbrains/kotlinx/spark/api/CompilerPluginTest.kt
new file mode 100644
index 00000000..c9f684a7
--- /dev/null
+++ b/kotlin-spark-api/src/test/kotlin/org/jetbrains/kotlinx/spark/api/CompilerPluginTest.kt
@@ -0,0 +1,47 @@
+package org.jetbrains.kotlinx.spark.api
+
+import io.kotest.assertions.throwables.shouldNotThrowAny
+import io.kotest.assertions.throwables.shouldThrowAny
+import io.kotest.core.spec.style.ShouldSpec
+import io.kotest.matchers.should
+import io.kotest.matchers.shouldBe
+import io.kotest.matchers.types.beInstanceOf
+import org.jetbrains.kotlinx.spark.api.plugin.annotations.ColumnName
+import org.jetbrains.kotlinx.spark.api.plugin.annotations.Sparkify
+
+class CompilerPluginTest : ShouldSpec({
+
+    @Sparkify
+    data class User(
+        val name: String = "John Doe",
+        val age: Int = 25,
+        @ColumnName("test")
+        val isEmpty: Boolean = false,
+    )
+
+    context("Compiler Plugin") {
+        should("be enabled") {
+            val user = User()
+            shouldNotThrowAny {
+                User::class.java.getMethod("name").invoke(user) shouldBe user.name
+                User::class.java.getMethod("age").invoke(user) shouldBe user.age
+                User::class.java.getMethod("test").invoke(user) shouldBe user.isEmpty
+            }
+
+            user should beInstanceOf<User>()
+            user should beInstanceOf<scala.Product>()
+            user should beInstanceOf<java.io.Serializable>()
+
+            shouldNotThrowAny {
+                User::class.java.getMethod("canEqual", Any::class.java).invoke(user, user) shouldBe true
+                User::class.java.getMethod("productArity").invoke(user) shouldBe 3
+                User::class.java.getMethod("productElement", Int::class.java).invoke(user, 0) shouldBe user.name
+                User::class.java.getMethod("productElement", Int::class.java).invoke(user, 1) shouldBe user.age
+                User::class.java.getMethod("productElement", Int::class.java).invoke(user, 2) shouldBe user.isEmpty
+            }
+            shouldThrowAny {
+                User::class.java.getMethod("productElement", Int::class.java).invoke(user, 10)
+            }
+        }
+    }
+})
\ No newline at end of file
diff --git a/kotlin-spark-api/src/test/kotlin/org/jetbrains/kotlinx/spark/api/DatasetFunctionTest.kt b/kotlin-spark-api/src/test/kotlin/org/jetbrains/kotlinx/spark/api/DatasetFunctionTest.kt
index 1cf6b861..be155590 100644
--- a/kotlin-spark-api/src/test/kotlin/org/jetbrains/kotlinx/spark/api/DatasetFunctionTest.kt
+++ b/kotlin-spark-api/src/test/kotlin/org/jetbrains/kotlinx/spark/api/DatasetFunctionTest.kt
@@ -33,6 +33,7 @@ import org.apache.spark.sql.functions.col
 import org.apache.spark.sql.streaming.GroupState
 import org.apache.spark.sql.streaming.GroupStateTimeout
 import org.jetbrains.kotlinx.spark.api.tuples.*
+import org.jetbrains.kotlinx.spark.api.plugin.annotations.Sparkify
 import scala.Tuple2
 import scala.Tuple3
 import scala.Tuple4
@@ -68,12 +69,16 @@ class DatasetFunctionTest : ShouldSpec({
             }
 
             should("handle join operations") {
+                @Sparkify
                 data class Left(val id: Int, val name: String)
 
+                @Sparkify
                 data class Right(val id: Int, val value: Int)
 
                 val first = dsOf(Left(1, "a"), Left(2, "b"))
                 val second = dsOf(Right(1, 100), Right(3, 300))
+                first.show()
+                second.show()
                 val result = first
                     .leftJoin(second, first.col("id") eq second.col("id"))
                     .map { it._1.id X it._1.name X it._2?.value }
@@ -208,8 +213,7 @@ class DatasetFunctionTest : ShouldSpec({
                     s = key
                     s shouldBe key
 
-                    if (collected.size > 1) collected.iterator()
-                    else emptyList<Tuple2<Int, String>>().iterator()
+                    if (collected.size > 1) collected else emptyList()
                 }
 
                 flatMappedWithState.count() shouldBe 2
@@ -453,4 +457,5 @@ class DatasetFunctionTest : ShouldSpec({
     }
 })
 
+@Sparkify
 data class SomeOtherClass(val a: IntArray, val b: Int, val c: Boolean) : Serializable
diff --git a/kotlin-spark-api/src/test/kotlin/org/jetbrains/kotlinx/spark/api/EncodingTest.kt b/kotlin-spark-api/src/test/kotlin/org/jetbrains/kotlinx/spark/api/EncodingTest.kt
index 5d6affcb..05acc6d0 100644
--- a/kotlin-spark-api/src/test/kotlin/org/jetbrains/kotlinx/spark/api/EncodingTest.kt
+++ b/kotlin-spark-api/src/test/kotlin/org/jetbrains/kotlinx/spark/api/EncodingTest.kt
@@ -22,12 +22,19 @@ package org.jetbrains.kotlinx.spark.api
 import ch.tutteli.atrium.api.fluent.en_GB.*
 import ch.tutteli.atrium.api.verbs.expect
 import io.kotest.core.spec.style.ShouldSpec
+import io.kotest.matchers.collections.shouldContainExactly
 import io.kotest.matchers.shouldBe
+import io.kotest.matchers.string.shouldContain
+import kotlinx.datetime.DateTimePeriod
+import kotlinx.datetime.toKotlinDatePeriod
+import kotlinx.datetime.toKotlinInstant
+import kotlinx.datetime.toKotlinLocalDate
+import kotlinx.datetime.toKotlinLocalDateTime
 import org.apache.spark.sql.Dataset
 import org.apache.spark.sql.types.Decimal
 import org.apache.spark.unsafe.types.CalendarInterval
+import org.jetbrains.kotlinx.spark.api.plugin.annotations.Sparkify
 import org.jetbrains.kotlinx.spark.api.tuples.*
-import org.jetbrains.kotlinx.spark.extensions.DemoCaseClass
 import scala.*
 import java.math.BigDecimal
 import java.sql.Date
@@ -35,10 +42,18 @@ import java.sql.Timestamp
 import java.time.Duration
 import java.time.Instant
 import java.time.LocalDate
+import java.time.LocalDateTime
 import java.time.Period
+import kotlin.time.TimeMark
+import kotlin.time.TimeSource
+import kotlin.time.TimeSource.Monotonic
+import kotlin.time.toKotlinDuration
 
 class EncodingTest : ShouldSpec({
 
+    @Sparkify
+    data class SparkifiedPair<T, U>(val first: T, val second: U)
+
     context("encoders") {
         withSpark(props = mapOf("spark.sql.codegen.comments" to true)) {
 
@@ -48,6 +63,12 @@ class EncodingTest : ShouldSpec({
                 dataset.collectAsList() shouldBe dates
             }
 
+            should("handle Kotlinx LocalDate Datasets") {
+                val dates = listOf(LocalDate.now().toKotlinLocalDate(), LocalDate.now().toKotlinLocalDate())
+                val dataset = dates.toDS()
+                dataset.collectAsList() shouldBe dates
+            }
+
             should("handle Instant Datasets") {
                 val instants = listOf(Instant.now(), Instant.now())
                 val dataset: Dataset<Instant> = instants.toDS()
@@ -58,17 +79,44 @@ class EncodingTest : ShouldSpec({
                 }
             }
 
+            should("handle Kotlinx Instant Datasets") {
+                val instants = listOf(Instant.now().toKotlinInstant(), Instant.now().toKotlinInstant())
+                val dataset = instants.toDS()
+                dataset.collectAsList().let { (first, second) ->
+                    val (a, b) = instants
+                    a.compareTo(first) shouldBe 0
+                    b.compareTo(second) shouldBe 0
+                }
+            }
+
             should("handle Timestamp Datasets") {
                 val timeStamps = listOf(Timestamp(0L), Timestamp(1L))
                 val dataset = timeStamps.toDS()
                 dataset.collectAsList() shouldBe timeStamps
             }
 
+            should("handle LocalDateTime") {
+                val timeStamps = listOf(LocalDateTime.now(), LocalDateTime.now().plusDays(3))
+                val dataset = timeStamps.toDS()
+                dataset.collectAsList() shouldBe timeStamps
+            }
+
+            should("handle Kotlinx LocalDateTime") {
+                val timeStamps = listOf(LocalDateTime.now().toKotlinLocalDateTime(), LocalDateTime.now().plusDays(3).toKotlinLocalDateTime())
+                val dataset = timeStamps.toDS()
+                dataset.collectAsList() shouldBe timeStamps
+            }
+
             //#if sparkMinor >= 3.2
             should("handle Duration Datasets") {
                 val dataset = dsOf(Duration.ZERO)
                 dataset.collectAsList() shouldBe listOf(Duration.ZERO)
             }
+
+            xshould("handle Kotlin Duration Datasets") {
+                val dataset = dsOf(Duration.ZERO.toKotlinDuration())
+                dataset.collectAsList() shouldBe listOf(Duration.ZERO.toKotlinDuration())
+            }
             //#endif
 
             //#if sparkMinor >= 3.2
@@ -87,6 +135,33 @@ class EncodingTest : ShouldSpec({
             }
             //#endif
 
+            should("handle Kotlinx DateTimePeriod Datasets") {
+                val periods = listOf(DateTimePeriod(years = 1), DateTimePeriod(hours = 2))
+                val dataset = periods.toDS()
+
+                dataset.show(false)
+
+                dataset.collectAsList().let {
+                    it[0] shouldBe DateTimePeriod(years = 1)
+                    // NOTE Spark truncates java.time.Period to months.
+                    it[1] shouldBe DateTimePeriod(hours = 2)
+                }
+            }
+
+            should("handle Kotlinx DatePeriod Datasets") {
+                val periods = listOf(Period.ZERO.toKotlinDatePeriod(), Period.ofDays(2).toKotlinDatePeriod())
+                val dataset = periods.toDS()
+
+                dataset.show(false)
+
+                dataset.collectAsList().let {
+                    it[0] shouldBe Period.ZERO.toKotlinDatePeriod()
+
+                    // NOTE Spark truncates java.time.Period to months.
+                    it[1] shouldBe Period.ofDays(0).toKotlinDatePeriod()
+                }
+            }
+
             should("handle binary datasets") {
                 val byteArray = "Hello there".encodeToByteArray()
                 val dataset = dsOf(byteArray)
@@ -132,8 +207,8 @@ class EncodingTest : ShouldSpec({
             }
 
             should("be able to serialize Date") {
-                val datePair = Date.valueOf("2020-02-10") to 5
-                val dataset: Dataset<Pair<Date, Int>> = dsOf(datePair)
+                val datePair = SparkifiedPair(Date.valueOf("2020-02-10"), 5)
+                val dataset: Dataset<SparkifiedPair<Date, Int>> = dsOf(datePair)
                 dataset.collectAsList() shouldBe listOf(datePair)
             }
 
@@ -209,11 +284,178 @@ class EncodingTest : ShouldSpec({
     context("schema") {
         withSpark(props = mapOf("spark.sql.codegen.comments" to true)) {
 
+            context("Give proper names to columns of data classes") {
+
+                infix fun <A, B> A.to(other: B) = SparkifiedPair(this, other)
+
+                should("Be able to serialize pairs") {
+                    val pairs = listOf(
+                        1 to "1",
+                        2 to "2",
+                        3 to "3",
+                    )
+                    val dataset = pairs.toDS()
+                    dataset.show()
+                    dataset.collectAsList() shouldBe pairs
+                    dataset.columns().shouldContainExactly("first", "second")
+                }
+
+                should("Be able to serialize pairs of pairs") {
+                    val pairs = listOf(
+                        1 to (1 to "1"),
+                        2 to (2 to "2"),
+                        3 to (3 to "3"),
+                    )
+                    val dataset = pairs.toDS()
+                    dataset.show()
+                    dataset.printSchema()
+                    dataset.columns().shouldContainExactly("first", "second")
+                    dataset.select("second.*").columns().shouldContainExactly("first", "second")
+                    dataset.collectAsList() shouldBe pairs
+                }
+
+                should("Be able to serialize pairs of pairs of pairs") {
+                    val pairs = listOf(
+                        1 to (1 to (1 to "1")),
+                        2 to (2 to (2 to "2")),
+                        3 to (3 to (3 to "3")),
+                    )
+                    val dataset = pairs.toDS()
+                    dataset.show()
+                    dataset.printSchema()
+                    dataset.columns().shouldContainExactly("first", "second")
+                    dataset.select("second.*").columns().shouldContainExactly("first", "second")
+                    dataset.select("second.second.*").columns().shouldContainExactly("first", "second")
+                    dataset.collectAsList() shouldBe pairs
+                }
+
+                should("Be able to serialize lists of pairs") {
+                    val pairs = listOf(
+                        listOf(1 to "1", 2 to "2"),
+                        listOf(3 to "3", 4 to "4"),
+                    )
+                    val dataset = pairs.toDS()
+                    dataset.show()
+                    dataset.printSchema()
+                    dataset.schema().toString().let {
+                        it shouldContain "first"
+                        it shouldContain "second"
+                    }
+                    dataset.collectAsList() shouldBe pairs
+                }
+
+                should("Be able to serialize lists of lists of pairs") {
+                    val pairs = listOf(
+                        listOf(
+                            listOf(1 to "1", 2 to "2"),
+                            listOf(3 to "3", 4 to "4")
+                        )
+                    )
+                    val dataset = pairs.toDS()
+                    dataset.show()
+                    dataset.printSchema()
+                    dataset.schema().toString().let {
+                        it shouldContain "first"
+                        it shouldContain "second"
+                    }
+                    dataset.collectAsList() shouldBe pairs
+                }
+
+                should("Be able to serialize lists of lists of lists of pairs") {
+                    val pairs = listOf(
+                        listOf(
+                            listOf(
+                                listOf(1 to "1", 2 to "2"),
+                                listOf(3 to "3", 4 to "4"),
+                            )
+                        )
+                    )
+                    val dataset = pairs.toDS()
+                    dataset.show()
+                    dataset.printSchema()
+                    dataset.schema().toString().let {
+                        it shouldContain "first"
+                        it shouldContain "second"
+                    }
+                    dataset.collectAsList() shouldBe pairs
+                }
+
+                should("Be able to serialize lists of lists of lists of pairs of pairs") {
+                    val pairs = listOf(
+                        listOf(
+                            listOf(
+                                listOf(1 to ("1" to 3.0), 2 to ("2" to 3.0)),
+                                listOf(3 to ("3" to 3.0), 4 to ("4" to 3.0)),
+                            )
+                        )
+                    )
+                    val dataset = pairs.toDS()
+                    dataset.show()
+                    dataset.printSchema()
+                    dataset.schema().toString().let {
+                        it shouldContain "first"
+                        it shouldContain "second"
+                    }
+                    dataset.collectAsList() shouldBe pairs
+                }
+
+                should("Be able to serialize arrays of pairs") {
+                    val pairs = arrayOf(
+                        arrayOf(1 to "1", 2 to "2"),
+                        arrayOf(3 to "3", 4 to "4"),
+                    )
+                    val dataset = pairs.toDS()
+                    dataset.show()
+                    dataset.printSchema()
+                    dataset.schema().toString().let {
+                        it shouldContain "first"
+                        it shouldContain "second"
+                    }
+                    dataset.collectAsList() shouldBe pairs
+                }
+
+                should("Be able to serialize arrays of arrays of pairs") {
+                    val pairs = arrayOf(
+                        arrayOf(
+                            arrayOf(1 to "1", 2 to "2"),
+                            arrayOf(3 to "3", 4 to "4")
+                        )
+                    )
+                    val dataset = pairs.toDS()
+                    dataset.show()
+                    dataset.printSchema()
+                    dataset.schema().toString().let {
+                        it shouldContain "first"
+                        it shouldContain "second"
+                    }
+                    dataset.collectAsList() shouldBe pairs
+                }
+
+                should("Be able to serialize arrays of arrays of arrays of pairs") {
+                    val pairs = arrayOf(
+                        arrayOf(
+                            arrayOf(
+                                arrayOf(1 to "1", 2 to "2"),
+                                arrayOf(3 to "3", 4 to "4"),
+                            )
+                        )
+                    )
+                    val dataset = pairs.toDS()
+                    dataset.show()
+                    dataset.printSchema()
+                    dataset.schema().toString().let {
+                        it shouldContain "first"
+                        it shouldContain "second"
+                    }
+                    dataset.collectAsList() shouldBe pairs
+                }
+            }
+
             should("handle Scala Case class datasets") {
                 val caseClasses = listOf(
-                    DemoCaseClass(1, "1"),
-                    DemoCaseClass(2, "2"),
-                    DemoCaseClass(3, "3"),
+                    tupleOf(1, "1"),
+                    tupleOf(2, "2"),
+                    tupleOf(3, "3"),
                 )
                 val dataset = caseClasses.toDS()
                 dataset.show()
@@ -222,9 +464,9 @@ class EncodingTest : ShouldSpec({
 
             should("handle Scala Case class with data class datasets") {
                 val caseClasses = listOf(
-                    DemoCaseClass(1, "1" to 1L),
-                    DemoCaseClass(2, "2" to 2L),
-                    DemoCaseClass(3, "3" to 3L),
+                    tupleOf(1, "1" to 1L),
+                    tupleOf(2, "2" to 2L),
+                    tupleOf(3, "3" to 3L),
                 )
                 val dataset = caseClasses.toDS()
                 dataset.show()
@@ -233,9 +475,9 @@ class EncodingTest : ShouldSpec({
 
             should("handle data class with Scala Case class datasets") {
                 val caseClasses = listOf(
-                    1 to DemoCaseClass(1, "1"),
-                    2 to DemoCaseClass(2, "2"),
-                    3 to DemoCaseClass(3, "3"),
+                    1 to tupleOf(1, "1"),
+                    2 to tupleOf(2, "2"),
+                    3 to tupleOf(3, "3"),
                 )
                 val dataset = caseClasses.toDS()
                 dataset.show()
@@ -244,9 +486,9 @@ class EncodingTest : ShouldSpec({
 
             should("handle data class with Scala Case class & deeper datasets") {
                 val caseClasses = listOf(
-                    1 to DemoCaseClass(1, "1" to DemoCaseClass(1, 1.0)),
-                    2 to DemoCaseClass(2, "2" to DemoCaseClass(2, 2.0)),
-                    3 to DemoCaseClass(3, "3" to DemoCaseClass(3, 3.0)),
+                    1 to tupleOf(1, "1" to tupleOf(1, 1.0)),
+                    2 to tupleOf(2, "2" to tupleOf(2, 2.0)),
+                    3 to tupleOf(3, "3" to tupleOf(3, 3.0)),
                 )
                 val dataset = caseClasses.toDS()
                 dataset.show()
@@ -254,14 +496,14 @@ class EncodingTest : ShouldSpec({
             }
 
 
-            xshould("handle Scala Option datasets") {
+            should("handle Scala Option datasets") {
                 val caseClasses = listOf(Some(1), Some(2), Some(3))
                 val dataset = caseClasses.toDS()
                 dataset.show()
                 dataset.collectAsList() shouldBe caseClasses
             }
 
-            xshould("handle Scala Option Option datasets") {
+            should("handle Scala Option Option datasets") {
                 val caseClasses = listOf(
                     Some(Some(1)),
                     Some(Some(2)),
@@ -271,7 +513,7 @@ class EncodingTest : ShouldSpec({
                 dataset.collectAsList() shouldBe caseClasses
             }
 
-            xshould("handle data class Scala Option datasets") {
+            should("handle data class Scala Option datasets") {
                 val caseClasses = listOf(
                     Some(1) to Some(2),
                     Some(3) to Some(4),
@@ -281,7 +523,7 @@ class EncodingTest : ShouldSpec({
                 dataset.collectAsList() shouldBe caseClasses
             }
 
-            xshould("handle Scala Option data class datasets") {
+            should("handle Scala Option data class datasets") {
                 val caseClasses = listOf(
                     Some(1 to 2),
                     Some(3 to 4),
@@ -317,6 +559,7 @@ class EncodingTest : ShouldSpec({
                     listOf(SomeClass(intArrayOf(1, 2, 3), 4)),
                     listOf(SomeClass(intArrayOf(3, 2, 1), 0)),
                 )
+                dataset.printSchema()
 
                 val (first, second) = dataset.collectAsList()
 
@@ -426,14 +669,16 @@ class EncodingTest : ShouldSpec({
             }
 
             should("Generate schema correctly with nullalble list and map") {
-                val schema = encoder<NullFieldAbleDataClass>().schema()
+                val schema = kotlinEncoderFor<NullFieldAbleDataClass>().schema()
                 schema.fields().forEach {
                     it.nullable() shouldBe true
                 }
             }
 
             should("handle strings converted to lists") {
+                @Sparkify
                 data class Movie(val id: Long, val genres: String)
+                @Sparkify
                 data class MovieExpanded(val id: Long, val genres: List<String>)
 
                 val comedies = listOf(Movie(1, "Comedy|Romance"), Movie(2, "Horror|Action")).toDS()
@@ -450,8 +695,10 @@ class EncodingTest : ShouldSpec({
 
             should("handle strings converted to arrays") {
 
+                @Sparkify
                 data class Movie(val id: Long, val genres: String)
 
+                @Sparkify
                 data class MovieExpanded(val id: Long, val genres: Array<String>) {
                     override fun equals(other: Any?): Boolean {
                         if (this === other) return true
@@ -481,28 +728,28 @@ class EncodingTest : ShouldSpec({
             }
 
             should("handle arrays of generics") {
-                data class Test<Z>(val id: Long, val data: Array<Pair<Z, Int>>)
+                data class Test<Z>(val id: Long, val data: Array<SparkifiedPair<Z, Int>>)
 
-                val result = listOf(Test(1, arrayOf(5.1 to 6, 6.1 to 7)))
+                val result = listOf(Test(1, arrayOf(SparkifiedPair(5.1, 6), SparkifiedPair(6.1, 7))))
                     .toDS()
                     .map { it.id to it.data.firstOrNull { liEl -> liEl.first < 6 } }
                     .map { it.second }
                     .collectAsList()
-                expect(result).toContain.inOrder.only.values(5.1 to 6)
+                expect(result).toContain.inOrder.only.values(SparkifiedPair(5.1, 6))
             }
 
             should("handle lists of generics") {
-                data class Test<Z>(val id: Long, val data: List<Pair<Z, Int>>)
+                data class Test<Z>(val id: Long, val data: List<SparkifiedPair<Z, Int>>)
 
-                val result = listOf(Test(1, listOf(5.1 to 6, 6.1 to 7)))
+                val result = listOf(Test(1, listOf(SparkifiedPair(5.1, 6), SparkifiedPair(6.1, 7))))
                     .toDS()
                     .map { it.id to it.data.firstOrNull { liEl -> liEl.first < 6 } }
                     .map { it.second }
                     .collectAsList()
-                expect(result).toContain.inOrder.only.values(5.1 to 6)
+                expect(result).toContain.inOrder.only.values(SparkifiedPair(5.1, 6))
             }
 
-            should("!handle primitive arrays") {
+            should("handle boxed arrays") {
                 val result = listOf(arrayOf(1, 2, 3, 4))
                     .toDS()
                     .map { it.map { ai -> ai + 1 } }
@@ -514,6 +761,7 @@ class EncodingTest : ShouldSpec({
     }
 })
 
+@Sparkify
 data class IsSomethingClass(
     val enabled: Boolean,
     val isEnabled: Boolean,
@@ -523,14 +771,17 @@ data class IsSomethingClass(
     val getDouble: Double
 )
 
+@Sparkify
 data class DataClassWithTuple<T : Product>(val tuple: T)
 
+@Sparkify
 data class LonLat(val lon: Double, val lat: Double)
 
 enum class SomeEnum { A, B }
 
 enum class SomeOtherEnum(val value: Int) { C(1), D(2) }
 
+@Sparkify
 data class ComplexEnumDataClass(
     val int: Int,
     val string: String,
@@ -544,6 +795,7 @@ data class ComplexEnumDataClass(
     val enumMap: Map<SomeEnum, SomeOtherEnum>,
 )
 
+@Sparkify
 data class NullFieldAbleDataClass(
     val optionList: List<Int>?,
     val optionMap: Map<String, Int>?,
diff --git a/kotlin-spark-api/src/test/kotlin/org/jetbrains/kotlinx/spark/api/RddTest.kt b/kotlin-spark-api/src/test/kotlin/org/jetbrains/kotlinx/spark/api/RddTest.kt
index 5f9b6d94..51a97c3d 100644
--- a/kotlin-spark-api/src/test/kotlin/org/jetbrains/kotlinx/spark/api/RddTest.kt
+++ b/kotlin-spark-api/src/test/kotlin/org/jetbrains/kotlinx/spark/api/RddTest.kt
@@ -4,13 +4,17 @@ import io.kotest.core.spec.style.ShouldSpec
 import io.kotest.matchers.collections.shouldContainAll
 import io.kotest.matchers.shouldBe
 import org.apache.spark.api.java.JavaRDD
-import org.jetbrains.kotlinx.spark.api.tuples.*
+import org.jetbrains.kotlinx.spark.api.tuples.X
+import org.jetbrains.kotlinx.spark.api.tuples.t
 import scala.Tuple2
 
 class RddTest : ShouldSpec({
     context("RDD extension functions") {
 
-        withSpark(logLevel = SparkLogLevel.DEBUG) {
+        withSpark(
+            props = mapOf("spark.sql.codegen.wholeStage" to false),
+            logLevel = SparkLogLevel.DEBUG,
+        ) {
 
             context("Key/value") {
                 should("work with spark example") {
@@ -70,7 +74,8 @@ class RddTest : ShouldSpec({
                     rdd.min() shouldBe 1.0
                 }
 
-                context("Work with any number") {
+                // TODO Does not work from testing environment
+                xcontext("Work with any number") {
 
                     should("Work with Bytes") {
                         val data = listOf(1, 1, 2, 2, 2, 3).map(Int::toByte)
@@ -104,7 +109,7 @@ class RddTest : ShouldSpec({
 
                     should("Work with Doubles") {
                         val data = listOf(1, 1, 2, 2, 2, 3).map(Int::toDouble)
-                        val rdd = data.toRDD().toJavaDoubleRDD()
+                        val rdd = data.toRDD()
                         rdd.sum() shouldBe data.sum().toDouble()
                     }
                 }
diff --git a/kotlin-spark-api/src/test/kotlin/org/jetbrains/kotlinx/spark/api/StreamingTest.kt b/kotlin-spark-api/src/test/kotlin/org/jetbrains/kotlinx/spark/api/StreamingTest.kt
index 4cc8b9c8..3667fa45 100644
--- a/kotlin-spark-api/src/test/kotlin/org/jetbrains/kotlinx/spark/api/StreamingTest.kt
+++ b/kotlin-spark-api/src/test/kotlin/org/jetbrains/kotlinx/spark/api/StreamingTest.kt
@@ -26,22 +26,20 @@ import io.kotest.matchers.collections.shouldContainAll
 import io.kotest.matchers.shouldBe
 import org.apache.commons.io.FileUtils
 import org.apache.hadoop.fs.FileSystem
+import org.apache.hadoop.fs.Path
 import org.apache.spark.SparkException
-import org.apache.spark.streaming.Checkpoint
 import org.apache.spark.streaming.Duration
 import org.apache.spark.streaming.Durations
 import org.apache.spark.streaming.Time
-import org.apache.spark.util.Utils
 import org.jetbrains.kotlinx.spark.api.tuples.X
 import org.jetbrains.kotlinx.spark.api.tuples.component1
 import org.jetbrains.kotlinx.spark.api.tuples.component2
 import org.jetbrains.kotlinx.spark.api.tuples.t
-import org.jetbrains.kotlinx.spark.extensions.KSparkExtensions
-import org.jetbrains.kotlinx.spark.extensions.`KSparkExtensions$`
 import scala.Tuple2
 import java.io.File
 import java.io.Serializable
 import java.nio.charset.StandardCharsets
+import java.nio.file.Files
 import java.util.*
 import java.util.concurrent.atomic.AtomicBoolean
 
@@ -202,18 +200,43 @@ class StreamingTest : ShouldSpec({
 })
 
 
-private val scalaCompatVersion = `KSparkExtensions$`.`MODULE$`.scalaCompatVersion()
-private val sparkVersion = `KSparkExtensions$`.`MODULE$`.sparkVersion()
-private fun createTempDir() = Utils.createTempDir(
-    System.getProperty("java.io.tmpdir"),
-    "spark_${scalaCompatVersion}_${sparkVersion}"
-).apply { deleteOnExit() }
+private val scalaCompatVersion = SCALA_COMPAT_VERSION
+private val sparkVersion = SPARK_VERSION
+private fun createTempDir() =
+    Files.createTempDirectory("spark_${scalaCompatVersion}_${sparkVersion}")
+        .toFile()
+        .also { it.deleteOnExit() }
+
+private fun checkpointFile(checkpointDir: String, checkpointTime: Time): Path {
+    val klass = Class.forName("org.apache.spark.streaming.Checkpoint$")
+    val moduleField = klass.getField("MODULE$").also { it.isAccessible = true }
+    val module = moduleField.get(null)
+    val checkpointFileMethod = klass.getMethod("checkpointFile", String::class.java, Time::class.java)
+        .also { it.isAccessible = true }
+    return checkpointFileMethod.invoke(module, checkpointDir, checkpointTime) as Path
+}
+
+private fun getCheckpointFiles(
+    checkpointDir: String,
+    fs: scala.Option<FileSystem>
+): scala.collection.Seq<Path> {
+    val klass = Class.forName("org.apache.spark.streaming.Checkpoint$")
+    val moduleField = klass.getField("MODULE$").also { it.isAccessible = true }
+    val module = moduleField.get(null)
+    val getCheckpointFilesMethod = klass.getMethod("getCheckpointFiles", String::class.java, scala.Option::class.java)
+        .also { it.isAccessible = true }
+    return getCheckpointFilesMethod.invoke(module, checkpointDir, fs) as scala.collection.Seq<Path>
+}
 
 private fun createCorruptedCheckpoint(): String {
     val checkpointDirectory = createTempDir().absolutePath
-    val fakeCheckpointFile = Checkpoint.checkpointFile(checkpointDirectory, Time(1000))
-    FileUtils.write(File(fakeCheckpointFile.toString()), "spark_corrupt_${scalaCompatVersion}_${sparkVersion}", StandardCharsets.UTF_8)
-    assert(Checkpoint.getCheckpointFiles(checkpointDirectory, (null as FileSystem?).toOption()).nonEmpty())
+    val fakeCheckpointFile = checkpointFile(checkpointDirectory, Time(1000))
+    FileUtils.write(
+        File(fakeCheckpointFile.toString()),
+        "spark_corrupt_${scalaCompatVersion}_${sparkVersion}",
+        StandardCharsets.UTF_8
+    )
+    assert(getCheckpointFiles(checkpointDirectory, (null as FileSystem?).toOption()).nonEmpty())
     return checkpointDirectory
 }
 
diff --git a/kotlin-spark-api/src/test/kotlin/org/jetbrains/kotlinx/spark/api/TypeInferenceTest.kt b/kotlin-spark-api/src/test/kotlin/org/jetbrains/kotlinx/spark/api/TypeInferenceTest.kt
index 0d65dafe..332db122 100644
--- a/kotlin-spark-api/src/test/kotlin/org/jetbrains/kotlinx/spark/api/TypeInferenceTest.kt
+++ b/kotlin-spark-api/src/test/kotlin/org/jetbrains/kotlinx/spark/api/TypeInferenceTest.kt
@@ -23,22 +23,29 @@ import ch.tutteli.atrium.creating.Expect
 import io.kotest.core.spec.style.ShouldSpec
 import org.apache.spark.sql.types.ArrayType
 import org.apache.spark.sql.types.IntegerType
+import org.jetbrains.kotlinx.spark.api.plugin.annotations.Sparkify
 import org.jetbrains.kotlinx.spark.api.struct.model.DataType.StructType
 import org.jetbrains.kotlinx.spark.api.struct.model.DataType.TypeName
 import org.jetbrains.kotlinx.spark.api.struct.model.ElementType.ComplexElement
 import org.jetbrains.kotlinx.spark.api.struct.model.ElementType.SimpleElement
 import org.jetbrains.kotlinx.spark.api.struct.model.Struct
 import org.jetbrains.kotlinx.spark.api.struct.model.StructField
-import kotlin.reflect.typeOf
-
 
 @OptIn(ExperimentalStdlibApi::class)
 class TypeInferenceTest : ShouldSpec({
+    @Sparkify
+    data class SparkifiedPair<T, U>(val first: T, val second: U)
+
+    @Sparkify
+    data class SparkifiedTriple<T, U, V>(val first: T, val second: U, val third: V)
+
     context("org.jetbrains.spark.api.org.jetbrains.spark.api.schema") {
-        data class Test2<T>(val vala2: T, val para2: Pair<T, String>)
-        data class Test<T>(val vala: T, val tripl1: Triple<T, Test2<Long>, T>)
+        @Sparkify
+        data class Test2<T>(val vala2: T, val para2: SparkifiedPair<T, String>)
+        @Sparkify
+        data class Test<T>(val vala: T, val tripl1: SparkifiedTriple<T, Test2<Long>, T>)
 
-        val struct = Struct.fromJson(schema(typeOf<Pair<String, Test<Int>>>()).prettyJson())!!
+        val struct = Struct.fromJson(schemaFor<SparkifiedPair<String, Test<Int>>>().prettyJson())!!
         should("contain correct typings") {
             expect(struct.fields).notToEqualNull().toContain.inAnyOrder.only.entries(
                 hasField("first", "string"),
@@ -64,11 +71,15 @@ class TypeInferenceTest : ShouldSpec({
         }
     }
     context("org.jetbrains.spark.api.org.jetbrains.spark.api.schema with more complex data") {
+        @Sparkify
         data class Single<T>(val vala3: T)
-        data class Test2<T>(val vala2: T, val para2: Pair<T, Single<Double>>)
-        data class Test<T>(val vala: T, val tripl1: Triple<T, Test2<Long>, T>)
 
-        val struct = Struct.fromJson(schema(typeOf<Pair<String, Test<Int>>>()).prettyJson())!!
+        @Sparkify
+        data class Test2<T>(val vala2: T, val para2: SparkifiedPair<T, Single<Double>>)
+        @Sparkify
+        data class Test<T>(val vala: T, val tripl1: SparkifiedTriple<T, Test2<Long>, T>)
+
+        val struct = Struct.fromJson(schemaFor<SparkifiedPair<String, Test<Int>>>().prettyJson())!!
         should("contain correct typings") {
             expect(struct.fields).notToEqualNull().toContain.inAnyOrder.only.entries(
                 hasField("first", "string"),
@@ -97,9 +108,9 @@ class TypeInferenceTest : ShouldSpec({
         }
     }
     context("org.jetbrains.spark.api.org.jetbrains.spark.api.schema without generics") {
-        data class Test(val a: String, val b: Int, val c: Double)
+        @Sparkify data class Test(val a: String, val b: Int, val c: Double)
 
-        val struct = Struct.fromJson(schema(typeOf<Test>()).prettyJson())!!
+        val struct = Struct.fromJson(schemaFor<Test>().prettyJson())!!
         should("return correct types too") {
             expect(struct.fields).notToEqualNull().toContain.inAnyOrder.only.entries(
                 hasField("a", "string"),
@@ -109,7 +120,7 @@ class TypeInferenceTest : ShouldSpec({
         }
     }
     context("type with list of ints") {
-        val struct = Struct.fromJson(schema(typeOf<List<Int>>()).prettyJson())!!
+        val struct = Struct.fromJson(schemaFor<List<Int>>().prettyJson())!!
         should("return correct types too") {
             expect(struct) {
                 isOfType("array")
@@ -118,7 +129,7 @@ class TypeInferenceTest : ShouldSpec({
         }
     }
     context("type with list of Pairs int to long") {
-        val struct = Struct.fromJson(schema(typeOf<List<Pair<Int, Long>>>()).prettyJson())!!
+        val struct = Struct.fromJson(schemaFor<List<SparkifiedPair<Int, Long>>>().prettyJson())!!
         should("return correct types too") {
             expect(struct) {
                 isOfType("array")
@@ -132,9 +143,9 @@ class TypeInferenceTest : ShouldSpec({
         }
     }
     context("type with list of generic data class with E generic name") {
-        data class Test<E>(val e: E)
+        @Sparkify data class Test<E>(val e: E)
 
-        val struct = Struct.fromJson(schema(typeOf<List<Test<String>>>()).prettyJson())!!
+        val struct = Struct.fromJson(schemaFor<List<Test<String>>>().prettyJson())!!
         should("return correct types too") {
             expect(struct) {
                 isOfType("array")
@@ -147,7 +158,7 @@ class TypeInferenceTest : ShouldSpec({
         }
     }
     context("type with list of list of int") {
-        val struct = Struct.fromJson(schema(typeOf<List<List<Int>>>()).prettyJson())!!
+        val struct = Struct.fromJson(schemaFor<List<List<Int>>>().prettyJson())!!
         should("return correct types too") {
             expect(struct) {
                 isOfType("array")
@@ -158,7 +169,7 @@ class TypeInferenceTest : ShouldSpec({
         }
     }
     context("Subtypes of list") {
-        val struct = Struct.fromJson(schema(typeOf<ArrayList<Int>>()).prettyJson())!!
+        val struct = Struct.fromJson(schemaFor<ArrayList<Int>>().prettyJson())!!
         should("return correct types too") {
             expect(struct) {
                 isOfType("array")
@@ -168,7 +179,7 @@ class TypeInferenceTest : ShouldSpec({
         }
     }
     context("Subtypes of list with nullable values") {
-        val struct = Struct.fromJson(schema(typeOf<ArrayList<Int?>>()).prettyJson())!!
+        val struct = Struct.fromJson(schemaFor<ArrayList<Int?>>().prettyJson())!!
         should("return correct types too") {
             expect(struct) {
                 isOfType("array")
@@ -178,9 +189,9 @@ class TypeInferenceTest : ShouldSpec({
         }
     }
     context("data class with props in order lon → lat") {
-        data class Test(val lon: Double, val lat: Double)
+        @Sparkify data class Test(val lon: Double, val lat: Double)
 
-        val struct = Struct.fromJson(schema(typeOf<Test>()).prettyJson())!!
+        val struct = Struct.fromJson(schemaFor<Test>().prettyJson())!!
         should("Not change order of fields") {
             expect(struct.fields).notToEqualNull().containsExactly(
                 hasField("lon", "double"),
@@ -189,9 +200,9 @@ class TypeInferenceTest : ShouldSpec({
         }
     }
     context("data class with nullable list inside") {
-        data class Sample(val optionList: List<Int>?)
+        @Sparkify data class Sample(val optionList: List<Int>?)
 
-        val struct = Struct.fromJson(schema(typeOf<Sample>()).prettyJson())!!
+        val struct = Struct.fromJson(schemaFor<Sample>().prettyJson())!!
 
         should("show that list is nullable and element is not") {
             expect(struct)
@@ -213,7 +224,7 @@ class TypeInferenceTest : ShouldSpec({
         }
 
         should("generate valid serializer schema") {
-            expect(encoder<Sample>().schema()) {
+            expect(schemaFor<Sample>() as org.apache.spark.sql.types.StructType) {
                 this
                     .feature("data type", { this.fields()?.asList() }) {
                         this.notToEqualNull().toContain.inOrder.only.entry {
@@ -221,8 +232,8 @@ class TypeInferenceTest : ShouldSpec({
                                 .feature("element name", { name() }) { toEqual("optionList") }
                                 .feature("field type", { dataType() }, {
                                     this
-                                        .isA<ArrayType>()
-                                        .feature("element type", { elementType() }) { isA<IntegerType>() }
+                                        .toBeAnInstanceOf<ArrayType>()
+                                        .feature("element type", { elementType() }) { toBeAnInstanceOf<IntegerType>() }
                                         .feature("element nullable", { containsNull() }) { toEqual(expected = false) }
                                 })
                                 .feature("optionList nullable", { nullable() }) { toEqual(true) }
@@ -256,5 +267,5 @@ private fun hasStruct(
 
 private fun hasField(name: String, type: String): Expect<StructField>.() -> Unit = {
     feature { f(it::name) }.toEqual(name)
-    feature { f(it::type) }.isA<TypeName>().feature { f(it::value) }.toEqual(type)
+    feature { f(it::type) }.toBeAnInstanceOf<TypeName>().feature { f(it::value) }.toEqual(type)
 }
diff --git a/kotlin-spark-api/src/test/kotlin/org/jetbrains/kotlinx/spark/api/UDFTest.kt b/kotlin-spark-api/src/test/kotlin/org/jetbrains/kotlinx/spark/api/UDFTest.kt
index 393d54d5..8bac0408 100644
--- a/kotlin-spark-api/src/test/kotlin/org/jetbrains/kotlinx/spark/api/UDFTest.kt
+++ b/kotlin-spark-api/src/test/kotlin/org/jetbrains/kotlinx/spark/api/UDFTest.kt
@@ -33,7 +33,9 @@ import org.apache.spark.sql.Encoder
 import org.apache.spark.sql.Row
 import org.apache.spark.sql.expressions.Aggregator
 import org.intellij.lang.annotations.Language
+import org.jetbrains.kotlinx.spark.api.plugin.annotations.Sparkify
 import org.junit.jupiter.api.assertThrows
+import scala.Product
 import scala.collection.Seq
 import java.io.Serializable
 import kotlin.random.Random
@@ -234,7 +236,8 @@ class UDFTest : ShouldSpec({
                     udf.register(::stringIntDiff)
 
                     @Language("SQL")
-                    val result = spark.sql("SELECT stringIntDiff(first, second) FROM test1").to<Int>().collectAsList()
+                    val result =
+                        spark.sql("SELECT stringIntDiff(getFirst, getSecond) FROM test1").to<Int>().collectAsList()
                     result shouldBe listOf(96, 96)
                 }
             }
@@ -303,7 +306,8 @@ class UDFTest : ShouldSpec({
                     )
                     ds should beOfType<Dataset<String>>()
 
-                    "${nameConcatAge.name}(${NormalClass::name.name}, ${NormalClass::age.name})" shouldBe ds.columns().single()
+                    "${nameConcatAge.name}(${NormalClass::name.name}, ${NormalClass::age.name})" shouldBe ds.columns()
+                        .single()
 
                     val collectAsList = ds.collectAsList()
                     collectAsList[0] shouldBe "a-10"
@@ -328,7 +332,8 @@ class UDFTest : ShouldSpec({
                     )
                     ds should beOfType<Dataset<Row>>()
 
-                    "${nameConcatAge.name}(${NormalClass::name.name}, ${NormalClass::age.name})" shouldBe ds.columns().single()
+                    "${nameConcatAge.name}(${NormalClass::name.name}, ${NormalClass::age.name})" shouldBe ds.columns()
+                        .single()
 
                     val collectAsList = ds.collectAsList()
                     collectAsList[0].getAs<String>(0) shouldBe "a-10"
@@ -353,7 +358,8 @@ class UDFTest : ShouldSpec({
                     )
                     ds should beOfType<Dataset<Row>>()
 
-                    "${nameConcatAge.name}(${NormalClass::name.name}, ${NormalClass::age.name})" shouldBe ds.columns().single()
+                    "${nameConcatAge.name}(${NormalClass::name.name}, ${NormalClass::age.name})" shouldBe ds.columns()
+                        .single()
 
                     val collectAsList = ds.collectAsList()
                     collectAsList[0].getAs<String>(0) shouldBe "a-10"
@@ -418,13 +424,14 @@ class UDFTest : ShouldSpec({
 
         context("udf return data class") {
             withSpark(logLevel = SparkLogLevel.DEBUG) {
+                /** TODO [org.apache.spark.sql.catalyst.CatalystTypeConverters.StructConverter.toCatalystImpl] needs it to be a [scala.Product] */
                 should("return NormalClass") {
                     listOf("a" to 1, "b" to 2).toDS().toDF().createOrReplaceTempView("test2")
 
                     udf.register("toNormalClass") { name: String, age: Int ->
                         NormalClass(age, name)
                     }
-                    spark.sql("select toNormalClass(first, second) from test2").show()
+                    spark.sql("select toNormalClass(getFirst, getSecond) from test2").show()
                 }
 
                 should("not return NormalClass when not registered") {
@@ -433,16 +440,17 @@ class UDFTest : ShouldSpec({
                     val toNormalClass2 = udf("toNormalClass2", ::NormalClass)
 
                     shouldThrow<AnalysisException> {
-                        spark.sql("select toNormalClass2(first, second) from test2").show()
+                        spark.sql("select toNormalClass2(getFirst, getSecond) from test2").show()
                     }
                 }
 
+                /** TODO [org.apache.spark.sql.catalyst.CatalystTypeConverters.StructConverter.toCatalystImpl] needs it to be a [scala.Product] */
                 should("return NormalClass using accessed by delegate") {
                     listOf(1 to "a", 2 to "b").toDS().toDF().createOrReplaceTempView("test2")
                     val toNormalClass3 = udf("toNormalClass3", ::NormalClass)
                     toNormalClass3.register()
 
-                    spark.sql("select toNormalClass3(first, second) from test2").show()
+                    spark.sql("select toNormalClass3(getFirst, getSecond) from test2").show()
                 }
             }
         }
@@ -491,8 +499,8 @@ class UDFTest : ShouldSpec({
                             buffer.apply { sum += it.sum; count += it.count }
 
                         override fun finish(it: Average) = it.sum.toDouble() / it.count
-                        override fun bufferEncoder() = encoder<Average>()
-                        override fun outputEncoder() = encoder<Double>()
+                        override fun bufferEncoder() = kotlinEncoderFor<Average>()
+                        override fun outputEncoder() = kotlinEncoderFor<Double>()
                     }
 
 //                    shouldThrow<IllegalStateException> {
@@ -615,8 +623,8 @@ class UDFTest : ShouldSpec({
                                 buffer.apply { sum += it.sum; count += it.count }
 
                             override fun finish(it: Average) = it.sum.toDouble() / it.count
-                            override fun bufferEncoder() = encoder<Average>()
-                            override fun outputEncoder() = encoder<Double>()
+                            override fun bufferEncoder() = kotlinEncoderFor<Average>()
+                            override fun outputEncoder() = kotlinEncoderFor<Double>()
                         }
                     )
 
@@ -642,7 +650,6 @@ class UDFTest : ShouldSpec({
                 }
 
 
-
             }
         }
 
@@ -1261,7 +1268,9 @@ class UDFTest : ShouldSpec({
     }
 })
 
+@Sparkify
 data class Employee(val name: String, val salary: Long)
+@Sparkify
 data class Average(var sum: Long, var count: Long)
 
 private object MyAverage : Aggregator<Employee, Average, Double>() {
@@ -1288,10 +1297,10 @@ private object MyAverage : Aggregator<Employee, Average, Double>() {
     override fun finish(reduction: Average): Double = reduction.sum.toDouble() / reduction.count
 
     // Specifies the Encoder for the intermediate value type
-    override fun bufferEncoder(): Encoder<Average> = encoder()
+    override fun bufferEncoder(): Encoder<Average> = kotlinEncoderFor()
 
     // Specifies the Encoder for the final output value type
-    override fun outputEncoder(): Encoder<Double> = encoder()
+    override fun outputEncoder(): Encoder<Double> = kotlinEncoderFor()
 
 }
 
@@ -1316,10 +1325,22 @@ private val aggregator = aggregatorOf<Long, Average, Double>(
 
 private val addTwoConst = { x: Int, y: Int -> x + y }
 
+@Sparkify
 data class NormalClass(
     val age: Int,
     val name: String
 )
+//    : Product {
+//    override fun canEqual(that: Any?): Boolean = that is NormalClass
+//
+//    override fun productElement(n: Int): Any =
+//        when (n) {
+//            0 -> age
+//            1 -> name
+//            else -> throw IndexOutOfBoundsException(n.toString())
+//        }
+//    override fun productArity(): Int = 2
+//}
 
 private val firstByteVal = { a: ByteArray -> a.firstOrNull() }
 private val firstShortVal = { a: ShortArray -> a.firstOrNull() }
diff --git a/kotlin-spark-api/src/test/kotlin/org/jetbrains/kotlinx/spark/api/struct/model/models.kt b/kotlin-spark-api/src/test/kotlin/org/jetbrains/kotlinx/spark/api/struct/model/models.kt
index f0d365e6..72a2f99e 100644
--- a/kotlin-spark-api/src/test/kotlin/org/jetbrains/kotlinx/spark/api/struct/model/models.kt
+++ b/kotlin-spark-api/src/test/kotlin/org/jetbrains/kotlinx/spark/api/struct/model/models.kt
@@ -23,6 +23,7 @@ import com.beust.klaxon.Converter
 import com.beust.klaxon.JsonObject
 import com.beust.klaxon.JsonValue
 import com.beust.klaxon.Klaxon
+import org.jetbrains.kotlinx.spark.api.plugin.annotations.Sparkify
 
 private fun <T> Klaxon.convert(
     k: kotlin.reflect.KClass<*>,
@@ -43,6 +44,7 @@ private val klaxon = Klaxon()
     .convert(DataType::class, { DataType.fromJson(it) }, { it.toJson() }, true)
     .convert(ElementType::class, { ElementType.fromJson(it) }, { it.toJson() }, true)
 
+@Sparkify
 data class Struct(
     val type: String,
     val fields: List<StructField>? = null,
@@ -56,6 +58,7 @@ data class Struct(
     }
 }
 
+@Sparkify
 data class StructField(
     val name: String,
     val type: DataType,
@@ -66,8 +69,8 @@ data class StructField(
 typealias Metadata = JsonObject
 
 sealed class DataType {
-    data class StructType(val value: Struct) : DataType()
-    data class TypeName(val value: String) : DataType()
+    @Sparkify data class StructType(val value: Struct) : DataType()
+    @Sparkify data class TypeName(val value: String) : DataType()
 
     public fun toJson(): String = klaxon.toJsonString(when (this) {
         is StructType -> this.value
@@ -84,8 +87,8 @@ sealed class DataType {
 }
 
 sealed class ElementType {
-    data class SimpleElement(val value: String) : ElementType()
-    data class ComplexElement(val value: Struct) : ElementType()
+    @Sparkify data class SimpleElement(val value: String) : ElementType()
+    @Sparkify data class ComplexElement(val value: Struct) : ElementType()
 
     public fun toJson(): String = klaxon.toJsonString(when (this) {
         is SimpleElement -> this.value
diff --git a/core/build.gradle.kts b/scala-helpers/build.gradle.kts
similarity index 74%
rename from core/build.gradle.kts
rename to scala-helpers/build.gradle.kts
index d9d09217..b636c14d 100644
--- a/core/build.gradle.kts
+++ b/scala-helpers/build.gradle.kts
@@ -1,4 +1,4 @@
-@file:Suppress("UnstableApiUsage", "NOTHING_TO_INLINE")
+@file:Suppress("UnstableApiUsage")
 
 import com.igormaznitsa.jcp.gradle.JcpTask
 import com.vanniktech.maven.publish.JavaLibrary
@@ -20,7 +20,7 @@ repositories {
 
 dependencies {
 
-    with(Dependencies) {
+    Dependencies {
         api(
             scalaLibrary,
             reflect,
@@ -30,22 +30,18 @@ dependencies {
         if (Versions.spark == "3.3.1") implementation(jacksonDatabind)
 
         implementation(
-            sparkSql,
+//            sparkSql, not needed atm
         )
     }
 }
 
-
 java {
     toolchain {
         if (Versions.scalaCompat.toDouble() > 2.12) { // scala 2.12 will always target java 8
-            languageVersion.set(
-                JavaLanguageVersion.of(Versions.jvmTarget)
-            )
+            languageVersion = JavaLanguageVersion.of(Versions.jvmTarget)
+
         } else if (Versions.jvmTarget == "1.8" || Versions.jvmTarget == "8") {
-            languageVersion.set(
-                JavaLanguageVersion.of(8)
-            )
+            languageVersion = JavaLanguageVersion.of(8)
         }
     }
 }
@@ -61,10 +57,10 @@ tasks.withType<ScalaCompile> {
 val scalaMainSources = sourceSets.main.get().scala.sourceDirectories
 
 val preprocessMain by tasks.creating(JcpTask::class)  {
-    sources.set(scalaMainSources)
-    clearTarget.set(true)
-    fileExtensions.set(listOf("scala"))
-    vars.set(Versions.versionMap)
+    sources = scalaMainSources
+    clearTarget = true
+    fileExtensions = listOf("scala")
+    vars = Versions.versionMap
     outputs.upToDateWhen { target.get().exists() }
 }
 
@@ -99,3 +95,8 @@ mavenPublishing {
     configure(JavaLibrary(Javadoc()))
 }
 
+// Publishing of scala-helpers can be skipped since it's only dependent on the Scala version
+val skipScalaOnlyDependent = System.getProperty("skipScalaOnlyDependent").toBoolean()
+tasks
+    .filter { "publish" in it.name }
+    .forEach { it.onlyIf { !skipScalaOnlyDependent } }
diff --git a/core/src/main/scala/org/jetbrains/kotlinx/spark/extensions/KSparkExtensions.scala b/scala-helpers/src/main/scala/org/jetbrains/kotlinx/spark/extensions/KSparkExtensions.scala
similarity index 57%
rename from core/src/main/scala/org/jetbrains/kotlinx/spark/extensions/KSparkExtensions.scala
rename to scala-helpers/src/main/scala/org/jetbrains/kotlinx/spark/extensions/KSparkExtensions.scala
index a28b0848..5fc912b7 100644
--- a/core/src/main/scala/org/jetbrains/kotlinx/spark/extensions/KSparkExtensions.scala
+++ b/scala-helpers/src/main/scala/org/jetbrains/kotlinx/spark/extensions/KSparkExtensions.scala
@@ -19,45 +19,23 @@
  */
 package org.jetbrains.kotlinx.spark.extensions
 
-import org.apache.spark.SparkContext
-import org.apache.spark.sql._
-import java.util
 import scala.reflect.ClassTag
 
 object KSparkExtensions {
 
-  val kotlinVersion = /*$"\""+kotlin+"\""$*/ /*-*/ ""
-  val scalaVersion = /*$"\""+scala+"\""$*/ /*-*/ ""
-  val scalaCompatVersion = /*$"\""+scalaCompat+"\""$*/ /*-*/ ""
-  val sparkVersion = /*$"\""+spark+"\""$*/ /*-*/ ""
-  val sparkMinorVersion = /*$"\""+sparkMinor+"\""$*/ /*-*/ ""
-
-  def col(d: Dataset[_], name: String): Column = d.col(name)
-
-  def col(name: String): Column = functions.col(name)
-
-  def lit(literal: Any): Column = functions.lit(literal)
-
-  def collectAsList[T](ds: Dataset[T]): util.List[T] = {
-    //#if scalaCompat >= 2.13
-    scala.jdk.javaapi.CollectionConverters.asJava(ds.collect())
-    //#else
-    //$scala.collection.JavaConverters.seqAsJavaList(ds.collect())
-    //#endif
-  }
-
-
-  def debugCodegen(df: Dataset[_]): Unit = {
-    import org.apache.spark.sql.execution.debug._
-    df.debugCodegen()
-  }
-
-  def debug(df: Dataset[_]): Unit = {
-    import org.apache.spark.sql.execution.debug._
-    df.debug()
-  }
-
-  def sparkContext(s: SparkSession): SparkContext = s.sparkContext
+//  def col(d: Dataset[_], name: String): Column = d.col(name)
+//
+//  def col(name: String): Column = functions.col(name)
+//
+//  def lit(literal: Any): Column = functions.lit(literal)
+//
+//  def collectAsList[T](ds: Dataset[T]): util.List[T] = {
+//    //#if scalaCompat >= 2.13
+//    scala.jdk.javaapi.CollectionConverters.asJava(ds.collect())
+//    //#else
+//    //$scala.collection.JavaConverters.seqAsJavaList(ds.collect())
+//    //#endif
+//  }
 
   /**
    * Produces a ClassTag[T], which is actually just a casted ClassTag[AnyRef].
diff --git a/core/src/main/scala/org/jetbrains/kotlinx/spark/extensions/VarargUnwrapper.scala b/scala-helpers/src/main/scala/org/jetbrains/kotlinx/spark/extensions/VarargUnwrapper.scala
similarity index 93%
rename from core/src/main/scala/org/jetbrains/kotlinx/spark/extensions/VarargUnwrapper.scala
rename to scala-helpers/src/main/scala/org/jetbrains/kotlinx/spark/extensions/VarargUnwrapper.scala
index 27f317a4..30f8fb63 100644
--- a/core/src/main/scala/org/jetbrains/kotlinx/spark/extensions/VarargUnwrapper.scala
+++ b/scala-helpers/src/main/scala/org/jetbrains/kotlinx/spark/extensions/VarargUnwrapper.scala
@@ -1,6 +1,13 @@
 package org.jetbrains.kotlinx.spark.extensions
 
-import org.apache.spark.sql.api.java.{UDF1, UDF2}
+
+trait VarargUnwrapperUDT1[T1, R] extends Serializable {
+  def apply(v1: T1): R
+}
+
+trait VarargUnwrapperUDT2[T1, T2, R] extends Serializable {
+  def apply(v1: T1, v2: T2): R
+}
 
 /**
  * Allows any simple vararg function reference to be treated as 23 different Scala functions.
@@ -13,8 +20,8 @@ import org.apache.spark.sql.api.java.{UDF1, UDF2}
  * @tparam R
  */
 class VarargUnwrapper[T, Array, R](
-    val varargFunc: UDF1[Array, R],
-    val newArray: UDF2[Integer, UDF1[Integer, T], Array],
+    val varargFunc: VarargUnwrapperUDT1[Array, R],
+    val newArray: VarargUnwrapperUDT2[Integer, VarargUnwrapperUDT1[Integer, T], Array],
 ) extends Serializable
   with Function0[R]
   with Function1[T, R]
@@ -40,7 +47,7 @@ class VarargUnwrapper[T, Array, R](
   with Function21[T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, R]
   with Function22[T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, R] {
 
-  private def vararg(t: T*): R = varargFunc.call(newArray.call(t.size, { t(_) }))
+  private def vararg(t: T*): R = varargFunc(newArray(t.size, { t(_) }))
 
   override def curried: Nothing = throw new UnsupportedOperationException()
   override def tupled: Nothing = throw new UnsupportedOperationException()
diff --git a/scala-tuples-in-kotlin/build.gradle.kts b/scala-tuples-in-kotlin/build.gradle.kts
index 2843c1f6..cc99e208 100644
--- a/scala-tuples-in-kotlin/build.gradle.kts
+++ b/scala-tuples-in-kotlin/build.gradle.kts
@@ -27,7 +27,7 @@ tasks.withType<Test>().configureEach {
 }
 
 dependencies {
-    with(Dependencies) {
+    Dependencies {
         implementation(
             kotlinStdLib,
             scalaLibrary,
@@ -43,9 +43,7 @@ dependencies {
 
 kotlin {
     jvmToolchain {
-        languageVersion.set(
-            JavaLanguageVersion.of(Versions.jvmTarget)
-        )
+        languageVersion = JavaLanguageVersion.of(Versions.jvmTarget)
     }
 }
 
@@ -70,8 +68,8 @@ mavenPublishing {
 
 
 // Publishing of scala-tuples-in-kotlin can be skipped since it's only dependent on the Scala version
-val skipScalaTuplesInKotlin = System.getProperty("skipScalaTuplesInKotlin").toBoolean()
+val skipScalaOnlyDependent = System.getProperty("skipScalaOnlyDependent").toBoolean()
 tasks
     .filter { "publish" in it.name }
-    .forEach { it.onlyIf { !skipScalaTuplesInKotlin } }
+    .forEach { it.onlyIf { !skipScalaOnlyDependent } }
 
diff --git a/settings.gradle.kts b/settings.gradle.kts
index d0aa217b..07822dec 100644
--- a/settings.gradle.kts
+++ b/settings.gradle.kts
@@ -1,3 +1,12 @@
+pluginManagement {
+    repositories {
+        mavenLocal()
+        mavenCentral()
+        gradlePluginPortal()
+        maven("https://maven.pkg.jetbrains.space/kotlin/p/kotlin/bootstrap")
+    }
+}
+
 plugins {
     id("com.gradle.enterprise") version "3.10.3"
 }
@@ -9,14 +18,14 @@ gradleEnterprise {
     }
 }
 
-
 val spark: String by settings
 val scala: String by settings
-val skipScalaTuplesInKotlin: String by settings
+val skipScalaOnlyDependent: String by settings
+val sparkConnect: String by settings
 System.setProperty("spark", spark)
 System.setProperty("scala", scala)
-System.setProperty("skipScalaTuplesInKotlin", skipScalaTuplesInKotlin)
-
+System.setProperty("skipScalaOnlyDependent", skipScalaOnlyDependent)
+System.setProperty("sparkConnect", sparkConnect)
 
 val scalaCompat
     get() = scala.substringBeforeLast('.')
@@ -25,14 +34,20 @@ val versions = "${spark}_${scalaCompat}"
 
 rootProject.name = "kotlin-spark-api-parent_$versions"
 
-include("core")
+include("scala-helpers")
 include("scala-tuples-in-kotlin")
 include("kotlin-spark-api")
 include("jupyter")
 include("examples")
+include("spark-connect-examples")
+include("compiler-plugin")
+include("gradle-plugin")
 
-project(":core").name = "core_$versions"
+// just scala dependent
+project(":scala-helpers").name = "scala-helpers_$scalaCompat"
 project(":scala-tuples-in-kotlin").name = "scala-tuples-in-kotlin_$scalaCompat"
+
+// spark+scala dependent
 project(":kotlin-spark-api").name = "kotlin-spark-api_$versions"
 project(":jupyter").name = "jupyter_$versions"
 project(":examples").name = "examples_$versions"
diff --git a/spark-connect-examples/build.gradle.kts b/spark-connect-examples/build.gradle.kts
new file mode 100644
index 00000000..c1f20c0a
--- /dev/null
+++ b/spark-connect-examples/build.gradle.kts
@@ -0,0 +1,60 @@
+import org.jetbrains.kotlin.gradle.dsl.JvmTarget
+
+plugins {
+    // Needs to be installed in the local maven repository or have the bootstrap jar on the classpath
+    id("org.jetbrains.kotlinx.spark.api")
+    kotlin("jvm")
+    application
+}
+
+// run with `./gradlew run`
+application {
+    mainClass = "org.jetbrains.kotlinx.spark.examples.MainKt"
+
+    // workaround for java 17
+    applicationDefaultJvmArgs = listOf("--add-opens", "java.base/java.nio=ALL-UNNAMED")
+}
+
+kotlinSparkApi {
+    enabled = true
+    sparkifyAnnotationFqNames = listOf("org.jetbrains.kotlinx.spark.api.plugin.annotations.Sparkify")
+}
+
+group = Versions.groupID
+version = Versions.project
+
+repositories {
+    mavenLocal()
+    mavenCentral()
+}
+
+dependencies {
+    Projects {
+        implementation(
+            // TODO kotlinSparkApi,
+        )
+    }
+
+    Dependencies {
+
+        // IMPORTANT!
+        compileOnly(sparkSqlApi)
+        implementation(sparkConnectClient)
+    }
+}
+
+// spark-connect seems to work well with java 17 as client and java 1.8 as server
+// also set gradle and your project sdk to java 17
+kotlin {
+    jvmToolchain {
+        languageVersion = JavaLanguageVersion.of(17)
+    }
+    compilerOptions {
+        jvmTarget = JvmTarget.JVM_17
+    }
+}
+
+tasks.withType<JavaCompile> {
+    sourceCompatibility = JavaVersion.VERSION_17.toString()
+    targetCompatibility = JavaVersion.VERSION_17.toString()
+}
diff --git a/spark-connect-examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/Main.kt b/spark-connect-examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/Main.kt
new file mode 100644
index 00000000..790bad24
--- /dev/null
+++ b/spark-connect-examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/Main.kt
@@ -0,0 +1,27 @@
+package org.jetbrains.kotlinx.spark.examples
+
+import org.apache.spark.sql.SparkSession
+import org.apache.spark.sql.connect.client.REPLClassDirMonitor
+
+// run with `./gradlew run` or set VM options: "--add-opens=java.base/java.nio=ALL-UNNAMED" in the IDE
+fun main() {
+    val spark =
+        SparkSession
+            .builder()
+            .remote("sc://localhost")
+            .create()
+
+    val classFinder = REPLClassDirMonitor("/mnt/data/Projects/kotlin-spark-api/spark-connect-examples/build/classes")
+    spark.registerClassFinder(classFinder)
+    spark.addArtifact("/mnt/data/Projects/kotlin-spark-api/spark-connect-examples/build/libs/spark-connect-examples-2.0.0-SNAPSHOT.jar")
+
+    spark.sql("select 1").show()
+
+    spark.stop()
+}
+
+//@Sparkify
+//data class Person(
+//    val name: String,
+//    val age: Int,
+//)