Skip to content

Commit

Permalink
chore: version upgrade + improve docker compose
Browse files Browse the repository at this point in the history
  • Loading branch information
loveirobert committed Jun 5, 2024
1 parent b790652 commit 2fea189
Show file tree
Hide file tree
Showing 15 changed files with 395 additions and 158 deletions.
2 changes: 1 addition & 1 deletion modules/azure-openai/package.json
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
{
"name": "@one-beyond-ai/azure-openai",
"version": "0.1.50",
"version": "0.1.52",
"dependencies": {
"@azure/openai": "^1.0.0-beta.11",
"@one-beyond-ai/common": "*"
Expand Down
2 changes: 1 addition & 1 deletion modules/common/package.json
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
{
"name": "@one-beyond-ai/common",
"version": "0.1.50",
"version": "0.1.52",
"dependencies": {
"tslib": "^2.3.0"
},
Expand Down
2 changes: 1 addition & 1 deletion modules/cost/package.json
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
{
"name": "@one-beyond-ai/cost",
"version": "0.1.50",
"version": "0.1.52",
"dependencies": {
"@one-beyond-ai/common": "*"
},
Expand Down
2 changes: 1 addition & 1 deletion modules/embed/package.json
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
{
"name": "@one-beyond-ai/embed",
"version": "0.1.50",
"version": "0.1.52",
"main": "./index.js",
"module": "./index.mjs",
"typings": "./index.d.ts",
Expand Down
2 changes: 1 addition & 1 deletion modules/openai/package.json
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
{
"name": "@one-beyond-ai/openai",
"version": "0.1.50",
"version": "0.1.52",
"dependencies": {
"openai": "^4.29.2",
"@one-beyond-ai/common": "*"
Expand Down
2 changes: 1 addition & 1 deletion modules/text-document-extractor/package.json
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
{
"name": "@one-beyond-ai/text-document-extractor",
"version": "0.1.50",
"version": "0.1.52",
"dependencies": {
"langchain": "^0.1.26",
"mammoth": "^1.7.0",
Expand Down
2 changes: 1 addition & 1 deletion modules/tokenizer/package.json
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
{
"name": "@one-beyond-ai/tokenizer",
"version": "0.1.50",
"version": "0.1.52",
"dependencies": {
"js-tiktoken": "^1.0.10",
"langchain": "^0.1.30",
Expand Down
44 changes: 43 additions & 1 deletion samples/one-beyond-ai-aws/infra/.env.example
Original file line number Diff line number Diff line change
Expand Up @@ -13,4 +13,46 @@ ELASTIC_NODE=https://localhost:9200
ELASTIC_USERNAME=elastic
ELASTIC_PASSWORD=YOUR_ELASTIC_PASSWORD
ELASTIC_INDEX=documents
ELASTIC_CA_CERT=YOUR_ELASTIC_CA_CERT
ELASTIC_CA_CERT=YOUR_ELASTIC_CA_CERT

# Project namespace (defaults to the current folder name if not set)
#COMPOSE_PROJECT_NAME=myproject


# Password for the 'elastic' user (at least 6 characters).
# NOTE: this overrides the ELASTIC_PASSWORD placeholder defined earlier in this file — keep only one value in a real .env.
ELASTIC_PASSWORD=QWEasd123%


# Password for the 'kibana_system' user (at least 6 characters)
KIBANA_PASSWORD=QWEasd123%


# Version of Elastic products
STACK_VERSION=8.13.2


# Set the cluster name
CLUSTER_NAME=docker-cluster


# Set to 'basic' or 'trial' to automatically start the 30-day trial
LICENSE=basic
#LICENSE=trial


# Port to expose Elasticsearch HTTP API to the host
ES_PORT=9200


# Port to expose Kibana to the host
KIBANA_PORT=5601


# Increase or decrease based on the available host memory (in bytes)
ES_MEM_LIMIT=1073741824
KB_MEM_LIMIT=1073741824
LS_MEM_LIMIT=1073741824


# SAMPLE predefined encryption key — for use in POC environments only; generate a fresh key for anything else
ENCRYPTION_KEY=c34d38b3a14956121ff2170e5030b471551370178f43e5626eec58b04a30fae2
61 changes: 31 additions & 30 deletions samples/one-beyond-ai-aws/infra/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -6,35 +6,36 @@ The `cdk.json` file tells the CDK Toolkit how to execute your app.

## Useful commands locally

* `docker-compose up` spin up localstack
* `npm install` install project dependencies
* `npm install -g aws-cdk-local aws-cdk` install cdk and cdklocal
* `pip install awscli-local` install awslocal command
* `aws configure` configure aws env locally - key: fake, secret: fake, region: eu-west-1, output: json
* `npm run watch` watch for changes and compile
* `npm run test` perform the jest unit tests
* `cdklocal bootstrap` deploy this stack to your default AWS account/region locally
* `cdklocal diff` compare deployed stack with current state
* `cdklocal deploy` deploy this stack to your default AWS account/region
* `cdklocal synth` emits the synthesized CloudFormation template
* `awslocal s3api put-bucket-notification-configuration --bucket file-bucket --notification-configuration file://event-config/events.json` To connect s3 events to lambda handler
* `awslocal s3api put-object --bucket file-bucket --key test001.txt --body=test.txt` To upload files to S3
* `aws logs describe-log-streams --log-group-name '/aws/lambda/localOneBeyondAIInfraFileUploadedEventHandler'` Query lambda log streams
* `awslocal logs get-log-events --log-group-name '/aws/lambda/localOneBeyondAIInfraFileUploadedEventHandler' --log-stream-name '2024/04/09/[$LATEST]50482fc7a2a12e23715128fcfe2af71e'` List logs from log stream
* `awslocal logs delete-log-stream --log-group-name '/aws/lambda/localOneBeyondAIInfraFileUploadedEventHandler' --log-stream-name '2024/04/16/[$LATEST]400e6b72bc9a7580648115fd8b626765'` Delete a log stream
* `awslocal cloudformation delete-stack --stack-name localOneBeyondAIInfraStack` To delete a stack
* `awslocal cloudformation describe-stacks` To list stacks
* `awslocal sns publish --topic-arn arn:aws:sns:eu-west-1:000000000000:localOneBeyondAIInfraFilePutEventTopic --message=hellohello` To test sns publish
* `docker network create elastic` to create elastic docker network
* `docker pull docker.elastic.co/elasticsearch/elasticsearch:8.13.2` to pull elasticsearch docker image
* `docker run --name elasticsearch --net elastic -p 9200:9200 -p 9300:9300 -e "discovery.type=single-node" -t docker.elastic.co/elasticsearch/elasticsearch:8.13.2` to run the elastic container
* `docker pull docker.elastic.co/kibana/kibana:8.13.2` to pull kibana
* `docker run --name kibana --net elastic -p 5601:5601 docker.elastic.co/kibana/kibana:8.13.2` to run kibana
* `npx ts-node ./scripts/create-elastic-index.ts` to create your elastic index
- `docker-compose up` spin up localstack
- `npm install` install project dependencies
- `npm install -g aws-cdk-local aws-cdk` install cdk and cdklocal
- `pip install awscli-local` install awslocal command
- `aws configure` configure aws env locally - key: fake, secret: fake, region: eu-west-1, output: json
- `npm run watch` watch for changes and compile
- `npm run test` perform the jest unit tests
- `cdklocal bootstrap` deploy this stack to your default AWS account/region locally
- `cdklocal diff` compare deployed stack with current state
- `cdklocal deploy` deploy this stack to your default AWS account/region
- `cdklocal synth` emits the synthesized CloudFormation template
- `awslocal s3api put-bucket-notification-configuration --bucket file-bucket --notification-configuration file://event-config/events.json` To connect s3 events to lambda handler
- `awslocal s3api put-object --bucket file-bucket --key test001.txt --body=test.txt` To upload files to S3
- `awslocal logs describe-log-streams --log-group-name '/aws/lambda/localOneBeyondAIInfraFileUploadedEventHandler'` Query lambda log streams
- `awslocal logs get-log-events --log-group-name '/aws/lambda/localOneBeyondAIInfraFileUploadedEventHandler' --log-stream-name '2024/04/09/[$LATEST]50482fc7a2a12e23715128fcfe2af71e'` List logs from log stream
- `awslocal logs delete-log-stream --log-group-name '/aws/lambda/localOneBeyondAIInfraFileUploadedEventHandler' --log-stream-name '2024/04/16/[$LATEST]400e6b72bc9a7580648115fd8b626765'` Delete a log stream
- `awslocal cloudformation delete-stack --stack-name localOneBeyondAIInfraStack` To delete a stack
- `awslocal cloudformation describe-stacks` To list stacks
- `awslocal sns publish --topic-arn arn:aws:sns:eu-west-1:000000000000:localOneBeyondAIInfraFilePutEventTopic --message=hellohello` To test sns publish
- `docker network create elastic` to create elastic docker network
- `docker pull docker.elastic.co/elasticsearch/elasticsearch:8.13.2` to pull elasticsearch docker image
- `docker run --name elasticsearch --net elastic -p 9200:9200 -p 9300:9300 -e "discovery.type=single-node" -t docker.elastic.co/elasticsearch/elasticsearch:8.13.2` to run the elastic container
- `docker pull docker.elastic.co/kibana/kibana:8.13.2` to pull kibana
- `docker run --name kibana --net elastic -p 5601:5601 docker.elastic.co/kibana/kibana:8.13.2` to run kibana
- `npx ts-node ./scripts/create-elastic-index.ts` to create your elastic index

## Useful commands
* `npm run watch` watch for changes and compile
* `npm run test` perform the jest unit tests
* `npx cdk deploy` deploy this stack to your default AWS account/region
* `npx cdk diff` compare deployed stack with current state
* `npx cdk synth` emits the synthesized CloudFormation template

- `npm run watch` watch for changes and compile
- `npm run test` perform the jest unit tests
- `npx cdk deploy` deploy this stack to your default AWS account/region
- `npx cdk diff` compare deployed stack with current state
- `npx cdk synth` emits the synthesized CloudFormation template
204 changes: 198 additions & 6 deletions samples/one-beyond-ai-aws/infra/docker-compose.yml
Original file line number Diff line number Diff line change
@@ -1,16 +1,208 @@
version: "3.8"
version: '3.8'

volumes:
certs:
driver: local
esdata01:
driver: local
kibanadata:
driver: local
metricbeatdata01:
driver: local
filebeatdata01:
driver: local
logstashdata01:
driver: local

networks:
default:
name: elastic
external: false

services:
localstack:
container_name: "${LOCALSTACK_DOCKER_NAME:-localstack-one-beyond-ai-main}"
container_name: '${LOCALSTACK_DOCKER_NAME:-localstack-one-beyond-ai-main}'
image: localstack/localstack
ports:
- "127.0.0.1:4566:4566" # LocalStack Gateway
- "127.0.0.1:4510-4559:4510-4559" # external services port range
- '127.0.0.1:4566:4566' # LocalStack Gateway
- '127.0.0.1:4510-4559:4510-4559' # external services port range
environment:
# LocalStack configuration: https://docs.localstack.cloud/references/configuration/
- DEBUG=${DEBUG:-0}
- DEFAULT_REGION=eu-west-1
volumes:
- "${LOCALSTACK_VOLUME_DIR:-./volume}:/var/lib/localstack"
- "/var/run/docker.sock:/var/run/docker.sock"
- '${LOCALSTACK_VOLUME_DIR:-./volume}:/var/lib/localstack'
- '/var/run/docker.sock:/var/run/docker.sock'

setup:
image: docker.elastic.co/elasticsearch/elasticsearch:${STACK_VERSION}
volumes:
- certs:/usr/share/elasticsearch/config/certs
user: '0'
command: >
bash -c '
if [ x${ELASTIC_PASSWORD} == x ]; then
echo "Set the ELASTIC_PASSWORD environment variable in the .env file";
exit 1;
elif [ x${KIBANA_PASSWORD} == x ]; then
echo "Set the KIBANA_PASSWORD environment variable in the .env file";
exit 1;
fi;
if [ ! -f config/certs/ca.zip ]; then
echo "Creating CA";
bin/elasticsearch-certutil ca --silent --pem -out config/certs/ca.zip;
unzip config/certs/ca.zip -d config/certs;
fi;
if [ ! -f config/certs/certs.zip ]; then
echo "Creating certs";
echo -ne \
"instances:\n"\
" - name: es01\n"\
" dns:\n"\
" - es01\n"\
" - localhost\n"\
" ip:\n"\
" - 127.0.0.1\n"\
" - name: kibana\n"\
" dns:\n"\
" - kibana\n"\
" - localhost\n"\
" ip:\n"\
" - 127.0.0.1\n"\
> config/certs/instances.yml;
bin/elasticsearch-certutil cert --silent --pem -out config/certs/certs.zip --in config/certs/instances.yml --ca-cert config/certs/ca/ca.crt --ca-key config/certs/ca/ca.key;
unzip config/certs/certs.zip -d config/certs;
fi;
echo "Setting file permissions"
chown -R root:root config/certs;
find . -type d -exec chmod 750 \{\} \;;
find . -type f -exec chmod 640 \{\} \;;
echo "Waiting for Elasticsearch availability";
until curl -s --cacert config/certs/ca/ca.crt https://es01:9200 | grep -q "missing authentication credentials"; do sleep 30; done;
echo "Setting kibana_system password";
until curl -s -X POST --cacert config/certs/ca/ca.crt -u "elastic:${ELASTIC_PASSWORD}" -H "Content-Type: application/json" https://es01:9200/_security/user/kibana_system/_password -d "{\"password\":\"${KIBANA_PASSWORD}\"}" | grep -q "^{}"; do sleep 10; done;
echo "All done!";
'
healthcheck:
test: ['CMD-SHELL', '[ -f config/certs/es01/es01.crt ]']
interval: 1s
timeout: 5s
retries: 120

es01:
depends_on:
setup:
condition: service_healthy
image: docker.elastic.co/elasticsearch/elasticsearch:${STACK_VERSION}
labels:
co.elastic.logs/module: elasticsearch
volumes:
- certs:/usr/share/elasticsearch/config/certs
- esdata01:/usr/share/elasticsearch/data
ports:
- ${ES_PORT}:9200
environment:
- node.name=es01
- cluster.name=${CLUSTER_NAME}
- discovery.type=single-node
- ELASTIC_PASSWORD=${ELASTIC_PASSWORD}
- bootstrap.memory_lock=true
- xpack.security.enabled=true
- xpack.security.http.ssl.enabled=true
- xpack.security.http.ssl.key=certs/es01/es01.key
- xpack.security.http.ssl.certificate=certs/es01/es01.crt
- xpack.security.http.ssl.certificate_authorities=certs/ca/ca.crt
- xpack.security.transport.ssl.enabled=true
- xpack.security.transport.ssl.key=certs/es01/es01.key
- xpack.security.transport.ssl.certificate=certs/es01/es01.crt
- xpack.security.transport.ssl.certificate_authorities=certs/ca/ca.crt
- xpack.security.transport.ssl.verification_mode=certificate
- xpack.license.self_generated.type=${LICENSE}
mem_limit: ${ES_MEM_LIMIT}
ulimits:
memlock:
soft: -1
hard: -1
healthcheck:
test:
[
'CMD-SHELL',
"curl -s --cacert config/certs/ca/ca.crt https://localhost:9200 | grep -q 'missing authentication credentials'",
]
interval: 10s
timeout: 10s
retries: 120

kibana:
depends_on:
es01:
condition: service_healthy
image: docker.elastic.co/kibana/kibana:${STACK_VERSION}
labels:
co.elastic.logs/module: kibana
volumes:
- certs:/usr/share/kibana/config/certs
- kibanadata:/usr/share/kibana/data
ports:
- ${KIBANA_PORT}:5601
environment:
- SERVERNAME=kibana
- ELASTICSEARCH_HOSTS=https://es01:9200
- ELASTICSEARCH_USERNAME=kibana_system
- ELASTICSEARCH_PASSWORD=${KIBANA_PASSWORD}
- ELASTICSEARCH_SSL_CERTIFICATEAUTHORITIES=config/certs/ca/ca.crt
- XPACK_SECURITY_ENCRYPTIONKEY=${ENCRYPTION_KEY}
- XPACK_ENCRYPTEDSAVEDOBJECTS_ENCRYPTIONKEY=${ENCRYPTION_KEY}
- XPACK_REPORTING_ENCRYPTIONKEY=${ENCRYPTION_KEY}
mem_limit: ${KB_MEM_LIMIT}
healthcheck:
test: ['CMD-SHELL', "curl -s -I http://localhost:5601 | grep -q 'HTTP/1.1 302 Found'"]
interval: 10s
timeout: 10s
retries: 120

metricbeat01:
depends_on:
es01:
condition: service_healthy
kibana:
condition: service_healthy
image: docker.elastic.co/beats/metricbeat:${STACK_VERSION}
user: root
volumes:
- certs:/usr/share/metricbeat/certs
- metricbeatdata01:/usr/share/metricbeat/data
- './metricbeat.yml:/usr/share/metricbeat/metricbeat.yml:ro'
- '/var/run/docker.sock:/var/run/docker.sock:ro'
- '/sys/fs/cgroup:/hostfs/sys/fs/cgroup:ro'
- '/proc:/hostfs/proc:ro'
- '/:/hostfs:ro'
environment:
- ELASTIC_USER=elastic
- ELASTIC_PASSWORD=${ELASTIC_PASSWORD}
- ELASTIC_HOSTS=https://es01:9200
- KIBANA_HOSTS=http://kibana:5601
- LOGSTASH_HOSTS=http://logstash01:9600
command:
- --strict.perms=false

logstash01:
depends_on:
es01:
condition: service_healthy
kibana:
condition: service_healthy
image: docker.elastic.co/logstash/logstash:${STACK_VERSION}
labels:
co.elastic.logs/module: logstash
user: root
volumes:
- certs:/usr/share/logstash/certs
- logstashdata01:/usr/share/logstash/data
- './logstash_ingest_data/:/usr/share/logstash/ingest_data/'
- './logstash.conf:/usr/share/logstash/pipeline/logstash.conf:ro'
environment:
- xpack.monitoring.enabled=false
- ELASTIC_USER=elastic
- ELASTIC_PASSWORD=${ELASTIC_PASSWORD}
- ELASTIC_HOSTS=https://es01:9200
Loading

0 comments on commit 2fea189

Please sign in to comment.