Skip to content

Commit 0ecc4b2

Browse files
conker84jexp
authored andcommitted
fixes #70: Document Docker usage/configuration in readme/docs
1 parent 7398bc4 commit 0ecc4b2

File tree

3 files changed

+157
-0
lines changed

3 files changed

+157
-0
lines changed
Lines changed: 41 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,41 @@
# Lightweight local test environment: Neo4j + Kafka + Zookeeper.
# Non-default ports (12181 / 19092) are used to avoid clashing with
# any Kafka/Zookeeper already running on the host.
version: '3'
services:
  neo4j:
    image: neo4j:3.4
    hostname: neo4j
    container_name: neo4j
    ports:
      - "7474:7474"  # HTTP browser
      - "7687:7687"  # Bolt
    depends_on:
      - kafka
    volumes:
      # Host directory holding the Streams plugin jar; adjust to your setup.
      - $HOME/neo4j/3.4/plugins:/plugins
    environment:
      NEO4J_AUTH: neo4j/streams
      NEO4J_dbms_logs_debug_level: DEBUG
      # KAFKA related configuration
      NEO4J_kafka_zookeeper_connect: zookeeper:12181
      NEO4J_kafka_bootstrap_servers: kafka:19092

  zookeeper:
    image: confluentinc/cp-zookeeper:latest
    hostname: zookeeper
    container_name: zookeeper
    ports:
      - "12181:12181"
    environment:
      # Quoted so the value is passed through as a string, not an int.
      ZOOKEEPER_CLIENT_PORT: "12181"

  kafka:
    image: confluentinc/cp-kafka:latest
    hostname: kafka
    container_name: kafka
    ports:
      - "19092:19092"
    depends_on:
      - zookeeper
    environment:
      # Single-broker setup, so the offsets topic cannot be replicated.
      KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: "1"
      KAFKA_ZOOKEEPER_CONNECT: zookeeper:12181
      # Advertised on the compose network name; reachable from the neo4j container.
      KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://kafka:19092

doc/asciidoc/docker/index.adoc

Lines changed: 112 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,112 @@
1+
== Execute with Docker
2+
3+
Below you'll find a lightweight Docker Compose file that allows you to test the application in your local environment.
4+
5+
Prerequisites:
6+
7+
- Docker
8+
- Docker Compose
9+
10+
Here are the instructions on how to install https://docs.docker.com/compose/install/[Docker and Docker-Compose]
11+
12+
=== Launch it locally
13+
14+
The following compose file allows you to spin up Neo4j, Kafka and Zookeeper in order to test the application.
15+
16+
Before starting, please change the volume directory to match your environment; the Streams jar must be placed inside the <plugins> directory
17+
18+
```yml
19+
volumes:
20+
- $HOME/neo4j/3.4/plugins:/plugins
21+
```
22+
23+
From the same directory where the compose file is, you can launch this command:
24+
25+
$ docker-compose up -d
26+
27+
28+
Please note that the Neo4j Docker image uses a naming convention; you can override every neo4j.conf property by prefixing it with `NEO4J_` and applying the following transformations:
29+
30+
* a single underscore is converted to a double underscore: `_ -> __`
31+
* a dot is converted to a single underscore: `.` -> `_`
32+
33+
Example:
34+
35+
* `dbms.memory.heap.max_size=8G` -> `NEO4J_dbms_memory_heap_max__size: 8G`
36+
* `dbms.logs.debug.level=DEBUG` -> `NEO4J_dbms_logs_debug_level: DEBUG`
37+
38+
=== Producer
39+
40+
If you are testing the producer, you can run a consumer that subscribes to the topic `neo4j` by executing this command:
41+
42+
$ docker exec kafka kafka-console-consumer --bootstrap-server kafka:19092 --topic neo4j --from-beginning
43+
44+
Then directly from the Neo4j browser you can generate some random data with this query:
45+
46+
```cypher
47+
UNWIND range(1,100) as id
48+
CREATE (p:Person {id:id, name: "Name " + id, age: id % 3}) WITH collect(p) as people
49+
UNWIND people as p1
50+
UNWIND range(1,10) as friend
51+
WITH p1, people[(p1.id + friend) % size(people)] as p2
52+
CREATE (p1)-[:KNOWS {years: abs(p2.id - p1.id)}]->(p2)
53+
```
54+
55+
And if you go back to your consumer you'll see something like this:
56+
57+
```bash
58+
{"key":"neo4j","value":{"meta":{"timestamp":1542047038549,"username":"neo4j","txId":12,"txEventId":107,"txEventsCount":110,"operation":"created","source":{"hostname":"neo4j"}},"payload":{"id":"99","start":{"id":"9","labels":["Person"]},"end":{"id":"0","labels":["Person"]},"before":null,"after":{"properties":{"years":9}},"label":"KNOWS","type":"relationship"},"schema":{"properties":[],"constraints":null}}}
59+
{"key":"neo4j","value":{"meta":{"timestamp":1542047038549,"username":"neo4j","txId":12,"txEventId":108,"txEventsCount":110,"operation":"created","source":{"hostname":"neo4j"}},"payload":{"id":"96","start":{"id":"9","labels":["Person"]},"end":{"id":"7","labels":["Person"]},"before":null,"after":{"properties":{"years":2}},"label":"KNOWS","type":"relationship"},"schema":{"properties":[],"constraints":null}}}
60+
{"key":"neo4j","value":{"meta":{"timestamp":1542047038549,"username":"neo4j","txId":12,"txEventId":109,"txEventsCount":110,"operation":"created","source":{"hostname":"neo4j"}},"payload":{"id":"97","start":{"id":"9","labels":["Person"]},"end":{"id":"8","labels":["Person"]},"before":null,"after":{"properties":{"years":1}},"label":"KNOWS","type":"relationship"},"schema":{"properties":[],"constraints":null}}}
61+
```
62+
63+
=== Consumer
64+
65+
If you are using the Sink, you can define your topic/Cypher-query combination as follows:
66+
67+
```yml
68+
environment:
69+
NEO4J_streams_sink_topic_neo4j:
70+
"WITH event.value.payload AS payload, event.value.meta AS meta
71+
FOREACH (ignoreMe IN CASE WHEN payload.type = 'node' AND meta.operation <> 'deleted' and payload.after.labels[0] = 'Question' THEN [1] ELSE [] END |
72+
MERGE (n:Question{neo_id: toInteger(payload.id)}) ON CREATE
73+
SET n += payload.after.properties
74+
)
75+
FOREACH (ignoreMe IN CASE WHEN payload.type = 'node' AND meta.operation <> 'deleted' and payload.after.labels[0] = 'Answer' THEN [1] ELSE [] END |
76+
MERGE (n:Answer{neo_id: toInteger(payload.id)}) ON CREATE
77+
SET n += payload.after.properties
78+
)
79+
FOREACH (ignoreMe IN CASE WHEN payload.type = 'node' AND meta.operation <> 'deleted' and payload.after.labels[0] = 'User' THEN [1] ELSE [] END |
80+
MERGE (n:User{neo_id: toInteger(payload.id)}) ON CREATE
81+
SET n += payload.after.properties
82+
)
83+
FOREACH (ignoreMe IN CASE WHEN payload.type = 'node' AND meta.operation <> 'deleted' and payload.after.labels[0] = 'Tag' THEN [1] ELSE [] END |
84+
MERGE (n:Tag{neo_id: toInteger(payload.id)}) ON CREATE
85+
SET n += payload.after.properties
86+
)
87+
FOREACH (ignoreMe IN CASE WHEN payload.type = 'relationship' AND meta.operation <> 'deleted' and payload.label = 'ANSWERS' THEN [1] ELSE [] END |
88+
MERGE (s:Answer{neo_id: toInteger(payload.start.id)})
89+
MERGE (e:Question{neo_id: toInteger(payload.end.id)})
90+
CREATE (s)-[:ANSWERS{neo_id: toInteger(payload.id)}]->(e)
91+
)
92+
FOREACH (ignoreMe IN CASE WHEN payload.type = 'relationship' AND meta.operation <> 'deleted' and payload.label = 'TAGGED' THEN [1] ELSE [] END |
93+
MERGE (s:Question{neo_id: toInteger(payload.start.id)})
94+
MERGE (e:Tag{neo_id: toInteger(payload.end.id)})
95+
CREATE (s)-[:TAGGED{neo_id: toInteger(payload.id)}]->(e)
96+
)
97+
FOREACH (ignoreMe IN CASE WHEN payload.type = 'relationship' AND meta.operation <> 'deleted' and payload.label = 'PROVIDED' THEN [1] ELSE [] END |
98+
MERGE (s:User{neo_id: toInteger(payload.start.id)})
99+
MERGE (e:Answer{neo_id: toInteger(payload.end.id)})
100+
CREATE (s)-[:PROVIDED{neo_id: toInteger(payload.id)}]->(e)
101+
)
102+
FOREACH (ignoreMe IN CASE WHEN payload.type = 'relationship' AND meta.operation <> 'deleted' and payload.label = 'ASKED' THEN [1] ELSE [] END |
103+
MERGE (s:User{neo_id: toInteger(payload.start.id)})
104+
MERGE (e:Question{neo_id: toInteger(payload.end.id)})
105+
CREATE (s)-[:ASKED{neo_id: toInteger(payload.id)}]->(e)
106+
)"
107+
```
108+
109+
.docker-compose.yml
110+
```yml
111+
include::data/docker-compose.yml[]
112+
```

readme.adoc

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -37,6 +37,10 @@ include::doc/asciidoc/producer/patterns.adoc[]
3737

3838
include::doc/asciidoc/consumer/configuration.adoc[]
3939

40+
== Docker
41+
42+
include::doc/asciidoc/docker/index.adoc[]
43+
4044
== Kafka Connect Neo4j Integration (by Confluent)
4145

4246
Source, Documentation and Support is currently not available from Confluent.

0 commit comments

Comments
 (0)