Skip to content
This repository was archived by the owner on May 25, 2023. It is now read-only.

Commit 684b7cf

Browse files
committed
Merge branch 'develop'
2 parents f8bdc6b + 689d661 commit 684b7cf

File tree

11 files changed

+111
-95
lines changed

11 files changed

+111
-95
lines changed

.travis.yml

+16
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,16 @@
1+
language: scala
2+
sudo: false
3+
jdk: oraclejdk8
4+
scala:
5+
- 2.11.11
6+
- 2.12.4
7+
sbt_args: -mem 1500
8+
script:
9+
- sbt "++ ${TRAVIS_SCALA_VERSION}!" test
10+
cache:
11+
directories:
12+
- "$HOME/.ivy2/cache"
13+
- "$HOME/.sbt/launchers"
14+
before_cache:
15+
- find $HOME/.sbt -name "*.lock" | xargs rm
16+
- find $HOME/.ivy2 -name "ivydata-*.properties" | xargs rm

README.md

+5-3
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,7 @@
11
# A Thin Scala Wrapper Around the Kafka Streams Java API
22

3+
[![Build Status](https://secure.travis-ci.org/lightbend/kafka-streams-scala.png)](http://travis-ci.org/lightbend/kafka-streams-scala)
4+
35
The library wraps Java APIs in Scala thereby providing:
46

57
1. much better type inference in Scala
@@ -13,15 +15,15 @@ The design of the library was inspired by the work started by Alexis Seigneurin
1315
`kafka-streams-scala` is published and cross-built for Scala `2.11`, and `2.12`, so you can just add the following to your build:
1416

1517
```scala
16-
val kafka_streams_scala_version = "0.1.0"
18+
val kafka_streams_scala_version = "0.1.1"
1719

1820
libraryDependencies ++= Seq("com.lightbend" %%
1921
"kafka-streams-scala" % kafka_streams_scala_version)
2022
```
2123

22-
> Note: `kafka-streams-scala` supports Kafka Streams `1.0.0`.
24+
> Note: `kafka-streams-scala` supports Kafka Streams `1.0.0` onwards.
2325
24-
The API docs for `kafka-streams-scala` is available [here](https://developer.lightbend.com/docs/api/kafka-streams-scala/0.1.0/com/lightbend/kafka/scala/streams) for Scala 2.12 and [here](https://developer.lightbend.com/docs/api/kafka-streams-scala_2.11/0.1.0/#package) for Scala 2.11.
26+
The API docs for `kafka-streams-scala` are available [here](https://developer.lightbend.com/docs/api/kafka-streams-scala/0.1.1/com/lightbend/kafka/scala/streams) for Scala 2.12 and [here](https://developer.lightbend.com/docs/api/kafka-streams-scala_2.11/0.1.1/#package) for Scala 2.11.
2527

2628
## Running the Tests
2729

build.sbt

+2-2
Original file line numberDiff line numberDiff line change
@@ -4,13 +4,13 @@ name := "kafka-streams-scala"
44

55
organization := "com.lightbend"
66

7-
version := "0.1.0"
7+
version := "0.1.1"
88

99
scalaVersion := Versions.Scala_2_12_Version
1010

1111
crossScalaVersions := Versions.CrossScalaVersions
1212

13-
scalacOptions := Seq("-Xexperimental", "-unchecked", "-deprecation")
13+
scalacOptions := Seq("-Xexperimental", "-unchecked", "-deprecation", "-Ywarn-unused-import")
1414

1515
parallelExecution in Test := false
1616

project/build.properties

+1
Original file line numberDiff line numberDiff line change
@@ -0,0 +1 @@
1+
sbt.version=1.1.0

src/main/scala/com/lightbend/kafka/scala/streams/KStreamS.scala

+13-9
Original file line numberDiff line numberDiff line change
@@ -14,7 +14,7 @@ import scala.collection.JavaConverters._
1414

1515
/**
1616
* Wraps the Java class KStream and delegates method calls to the underlying Java object.
17-
*/
17+
*/
1818
class KStreamS[K, V](val inner: KStream[K, V]) {
1919

2020
def filter(predicate: (K, V) => Boolean): KStreamS[K, V] = {
@@ -47,7 +47,7 @@ class KStreamS[K, V](val inner: KStream[K, V]) {
4747
inner.flatMapValues[VR]((v) => processor(v).asJava)
4848
}
4949

50-
def print(printed: Printed[K, V]) = inner.print(printed)
50+
def print(printed: Printed[K, V]): Unit = inner.print(printed)
5151

5252
def foreach(action: (K, V) => Unit): Unit = {
5353
inner.foreach((k, v) => action(k, v))
@@ -74,15 +74,19 @@ class KStreamS[K, V](val inner: KStream[K, V]) {
7474
val transformerS: Transformer[K, V, (K1, V1)] = transformerSupplier()
7575
new Transformer[K, V, KeyValue[K1, V1]] {
7676
override def transform(key: K, value: V): KeyValue[K1, V1] = {
77-
val (k1,v1) = transformerS.transform(key, value)
78-
KeyValue.pair(k1, v1)
77+
transformerS.transform(key, value) match {
78+
case (k1,v1) => KeyValue.pair(k1, v1)
79+
case _ => null
80+
}
7981
}
8082

8183
override def init(context: ProcessorContext): Unit = transformerS.init(context)
8284

8385
override def punctuate(timestamp: Long): KeyValue[K1, V1] = {
84-
val (k1,v1) = transformerS.punctuate(timestamp)
85-
KeyValue.pair[K1, V1](k1, v1)
86+
transformerS.punctuate(timestamp) match {
87+
case (k1, v1) => KeyValue.pair[K1, V1](k1, v1)
88+
case _ => null
89+
}
8690
}
8791

8892
override def close(): Unit = transformerS.close()
@@ -99,7 +103,7 @@ class KStreamS[K, V](val inner: KStream[K, V]) {
99103
}
100104

101105
def process(processorSupplier: () => Processor[K, V],
102-
stateStoreNames: String*) = {
106+
stateStoreNames: String*): Unit = {
103107

104108
val processorSupplierJ: ProcessorSupplier[K, V] = () => processorSupplier()
105109
inner.process(processorSupplierJ, stateStoreNames: _*)
@@ -206,13 +210,13 @@ class KStreamS[K, V](val inner: KStream[K, V]) {
206210

207211
def merge(stream: KStreamS[K, V]): KStreamS[K, V] = inner.merge(stream)
208212

209-
def peek(action: (K, V) => Unit): KStream[K, V] = {
213+
def peek(action: (K, V) => Unit): KStreamS[K, V] = {
210214
inner.peek(action(_,_))
211215
}
212216

213217
// -- EXTENSIONS TO KAFKA STREAMS --
214218

215-
// applies the predicate to know what messages shuold go to the left stream (predicate == true)
219+
// applies the predicate to know what messages should go to the left stream (predicate == true)
216220
// or to the right stream (predicate == false)
217221
def split(predicate: (K, V) => Boolean): (KStreamS[K, V], KStreamS[K, V]) = {
218222
(this.filter(predicate), this.filterNot(predicate))
Original file line numberDiff line numberDiff line change
@@ -1,67 +1,74 @@
11
/**
2-
* Copyright (C) 2018 Lightbend Inc. <https://www.lightbend.com>
3-
*/
2+
* Copyright (C) 2018 Lightbend Inc. <https://www.lightbend.com>
3+
*/
44

55
package com.lightbend.kafka.scala.streams
66

77
import java.util.regex.Pattern
88

9-
import ImplicitConversions._
10-
import org.apache.kafka.streams.kstream.{ GlobalKTable, Materialized }
9+
import com.lightbend.kafka.scala.streams.ImplicitConversions._
10+
import org.apache.kafka.common.utils.Bytes
11+
import org.apache.kafka.streams.kstream.{GlobalKTable, Materialized}
1112
import org.apache.kafka.streams.processor.{ProcessorSupplier, StateStore}
12-
import org.apache.kafka.streams.state.{ StoreBuilder, KeyValueStore }
13+
import org.apache.kafka.streams.state.{KeyValueStore, StoreBuilder}
1314
import org.apache.kafka.streams.{Consumed, StreamsBuilder, Topology}
14-
import org.apache.kafka.common.utils.Bytes
1515

1616
import scala.collection.JavaConverters._
1717

1818
/**
19-
* Wraps the Java class StreamsBuilder and delegates method calls to the underlying Java object.
20-
*/
21-
class StreamsBuilderS {
19+
* Wraps the Java class StreamsBuilder and delegates method calls to the underlying Java object.
20+
*/
21+
class StreamsBuilderS(inner: StreamsBuilder = new StreamsBuilder) {
2222

23-
val inner = new StreamsBuilder
23+
def stream[K, V](topic: String): KStreamS[K, V] =
24+
inner.stream[K, V](topic)
2425

25-
def stream[K, V](topic: String) : KStreamS[K, V] =
26-
inner.stream[K, V](topic)
26+
def stream[K, V](topic: String, consumed: Consumed[K, V]): KStreamS[K, V] =
27+
inner.stream[K, V](topic, consumed)
2728

28-
def stream[K, V](topic: String, consumed: Consumed[K, V]) : KStreamS[K, V] =
29-
inner.stream[K, V](topic, consumed)
30-
31-
def stream[K, V](topics: List[String]): KStreamS[K, V] =
32-
inner.stream[K, V](topics.asJava)
29+
def stream[K, V](topics: List[String]): KStreamS[K, V] =
30+
inner.stream[K, V](topics.asJava)
3331

3432
def stream[K, V](topics: List[String], consumed: Consumed[K, V]): KStreamS[K, V] =
35-
inner.stream[K, V](topics.asJava, consumed)
33+
inner.stream[K, V](topics.asJava, consumed)
3634

37-
def stream[K, V](topicPattern: Pattern) : KStreamS[K, V] =
35+
def stream[K, V](topicPattern: Pattern): KStreamS[K, V] =
3836
inner.stream[K, V](topicPattern)
3937

40-
def stream[K, V](topicPattern: Pattern, consumed: Consumed[K, V]) : KStreamS[K, V] =
38+
def stream[K, V](topicPattern: Pattern, consumed: Consumed[K, V]): KStreamS[K, V] =
4139
inner.stream[K, V](topicPattern, consumed)
4240

43-
def table[K, V](topic: String) : KTableS[K, V] = inner.table[K, V](topic)
41+
def table[K, V](topic: String): KTableS[K, V] = inner.table[K, V](topic)
4442

45-
def table[K, V](topic: String, consumed: Consumed[K, V]) : KTableS[K, V] =
43+
def table[K, V](topic: String, consumed: Consumed[K, V]): KTableS[K, V] =
4644
inner.table[K, V](topic, consumed)
4745

4846
def table[K, V](topic: String, consumed: Consumed[K, V],
49-
materialized: Materialized[K, V, KeyValueStore[Bytes, Array[Byte]]]): KTableS[K, V] =
50-
inner.table[K, V](topic, consumed, materialized)
47+
materialized: Materialized[K, V, KeyValueStore[Bytes, Array[Byte]]]): KTableS[K, V] =
48+
inner.table[K, V](topic, consumed, materialized)
5149

52-
def table[K, V](topic: String,
53-
materialized: Materialized[K, V, KeyValueStore[Bytes, Array[Byte]]]): KTableS[K, V] =
50+
def table[K, V](topic: String,
51+
materialized: Materialized[K, V, KeyValueStore[Bytes, Array[Byte]]]): KTableS[K, V] =
5452
inner.table[K, V](topic, materialized)
5553

5654
def globalTable[K, V](topic: String): GlobalKTable[K, V] =
5755
inner.globalTable(topic)
5856

59-
def addStateStore(builder: StoreBuilder[_ <: StateStore]): StreamsBuilder = inner.addStateStore(builder)
57+
def globalTable[K, V](topic: String, consumed: Consumed[K, V]): GlobalKTable[K, V] =
58+
inner.globalTable(topic, consumed)
6059

61-
def addGlobalStore(storeBuilder: StoreBuilder[_ <: StateStore], topic: String, sourceName: String, consumed: Consumed[_, _], processorName: String, stateUpdateSupplier: ProcessorSupplier[_, _]): StreamsBuilder =
62-
inner.addGlobalStore(storeBuilder,topic,sourceName,consumed,processorName,stateUpdateSupplier)
60+
def globalTable[K, V](topic: String, consumed: Consumed[K, V],
61+
materialized: Materialized[K, V, KeyValueStore[Bytes, Array[Byte]]]): GlobalKTable[K, V] =
62+
inner.globalTable(topic, consumed, materialized)
63+
64+
def globalTable[K, V](topic: String,
65+
materialized: Materialized[K, V, KeyValueStore[Bytes, Array[Byte]]]): GlobalKTable[K, V] =
66+
inner.globalTable(topic, materialized)
6367

64-
def build() : Topology = inner.build()
65-
}
68+
def addStateStore(builder: StoreBuilder[_ <: StateStore]): StreamsBuilder = inner.addStateStore(builder)
6669

70+
def addGlobalStore(storeBuilder: StoreBuilder[_ <: StateStore], topic: String, sourceName: String, consumed: Consumed[_, _], processorName: String, stateUpdateSupplier: ProcessorSupplier[_, _]): StreamsBuilder =
71+
inner.addGlobalStore(storeBuilder, topic, sourceName, consumed, processorName, stateUpdateSupplier)
6772

73+
def build(): Topology = inner.build()
74+
}

src/test/scala/com/lightbend/kafka/scala/server/KafkaLocalServer.scala

-3
Original file line numberDiff line numberDiff line change
@@ -8,17 +8,14 @@ package com.lightbend.kafka.scala.server
88
// https://github.com/lagom/lagom/blob/master/dev/kafka-server/src/main/scala/com/lightbend/lagom/internal/kafka/KafkaLocalServer.scala
99

1010
import java.io.{ IOException, File }
11-
import java.nio.file.{ FileVisitOption, Files, Paths }
1211
import java.util.Properties
1312

1413
import org.apache.curator.test.TestingServer
1514
import com.typesafe.scalalogging.LazyLogging
1615

1716
import kafka.server.{KafkaConfig, KafkaServerStartable}
1817

19-
import scala.collection.JavaConverters._
2018
import scala.util.{ Try, Success, Failure }
21-
import java.util.Comparator
2219

2320
import kafka.admin.{AdminUtils, RackAwareMode}
2421
import kafka.utils.ZkUtils

src/test/scala/com/lightbend/kafka/scala/server/Utils.scala

+1-1
Original file line numberDiff line numberDiff line change
@@ -9,7 +9,7 @@ import java.io.File
99
import java.nio.file.{ FileVisitOption, Files, Paths }
1010
import java.util.Comparator
1111

12-
import scala.util.{ Try, Success, Failure }
12+
import scala.util.Try
1313
import scala.collection.JavaConverters._
1414

1515
object Utils {

src/test/scala/com/lightbend/kafka/scala/streams/KafkaStreamsTest.scala

+17-21
Original file line numberDiff line numberDiff line change
@@ -1,23 +1,18 @@
11
/**
2-
* Copyright (C) 2018 Lightbend Inc. <https://www.lightbend.com>
3-
*/
2+
* Copyright (C) 2018 Lightbend Inc. <https://www.lightbend.com>
3+
*/
44

55
package com.lightbend.kafka.scala.streams
66

7-
import minitest.TestSuite
8-
import com.lightbend.kafka.scala.server.{ KafkaLocalServer, MessageSender, MessageListener, RecordProcessorTrait }
9-
10-
import java.util.{ Properties, Locale }
7+
import java.util.Properties
118
import java.util.regex.Pattern
129

13-
import org.apache.kafka.streams.{ KeyValue, StreamsConfig, KafkaStreams, Consumed }
14-
import org.apache.kafka.streams.kstream.{ Materialized, Produced, KeyValueMapper, Printed }
15-
import org.apache.kafka.common.serialization.{ Serdes, StringSerializer, StringDeserializer, Serde, LongDeserializer }
10+
import com.lightbend.kafka.scala.server.{KafkaLocalServer, MessageListener, MessageSender, RecordProcessorTrait}
11+
import minitest.TestSuite
1612
import org.apache.kafka.clients.consumer.ConsumerRecord
17-
18-
import scala.concurrent.duration._
19-
20-
import ImplicitConversions._
13+
import org.apache.kafka.common.serialization._
14+
import org.apache.kafka.streams.kstream.Produced
15+
import org.apache.kafka.streams.{KafkaStreams, KeyValue, StreamsConfig}
2116

2217
object KafkaStreamsTest extends TestSuite[KafkaLocalServer] with WordCountTestData {
2318

@@ -45,19 +40,19 @@ object KafkaStreamsTest extends TestSuite[KafkaLocalServer] with WordCountTestDa
4540
val streamsConfiguration = new Properties()
4641
streamsConfiguration.put(StreamsConfig.APPLICATION_ID_CONFIG, s"wordcount-${scala.util.Random.nextInt(100)}")
4742
streamsConfiguration.put(StreamsConfig.CLIENT_ID_CONFIG, "wordcountgroup")
48-
43+
4944
streamsConfiguration.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, brokers)
5045
streamsConfiguration.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName())
5146
streamsConfiguration.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName())
5247
streamsConfiguration.put(StreamsConfig.STATE_DIR_CONFIG, localStateDir)
5348

54-
val builder = new StreamsBuilderS
49+
val builder = new StreamsBuilderS()
5550

5651
val textLines = builder.stream[String, String](inputTopic)
5752

5853
val pattern = Pattern.compile("\\W+", Pattern.UNICODE_CHARACTER_CLASS)
5954

60-
val wordCounts: KTableS[String, Long] =
55+
val wordCounts: KTableS[String, Long] =
6156
textLines.flatMapValues(v => pattern.split(v.toLowerCase))
6257
.groupBy((k, v) => v)
6358
.count()
@@ -70,15 +65,15 @@ object KafkaStreamsTest extends TestSuite[KafkaLocalServer] with WordCountTestDa
7065
//
7166
// Step 2: Produce some input data to the input topic.
7267
//
73-
val sender = MessageSender[String, String](brokers, classOf[StringSerializer].getName, classOf[StringSerializer].getName)
68+
val sender = MessageSender[String, String](brokers, classOf[StringSerializer].getName, classOf[StringSerializer].getName)
7469
val mvals = sender.batchWriteValue(inputTopic, inputValues)
7570

7671
//
7772
// Step 3: Verify the application's output data.
7873
//
79-
val listener = MessageListener(brokers, outputTopic, "wordcountgroup",
80-
classOf[StringDeserializer].getName,
81-
classOf[LongDeserializer].getName,
74+
val listener = MessageListener(brokers, outputTopic, "wordcountgroup",
75+
classOf[StringDeserializer].getName,
76+
classOf[LongDeserializer].getName,
8277
new RecordProcessor
8378
)
8479

@@ -90,10 +85,11 @@ object KafkaStreamsTest extends TestSuite[KafkaLocalServer] with WordCountTestDa
9085
}
9186

9287
class RecordProcessor extends RecordProcessorTrait[String, Long] {
93-
override def processRecord(record: ConsumerRecord[String, Long]): Unit = {
88+
override def processRecord(record: ConsumerRecord[String, Long]): Unit = {
9489
// println(s"Get Message $record")
9590
}
9691
}
92+
9793
}
9894

9995
trait WordCountTestData {

0 commit comments

Comments
 (0)