diff --git a/.bazelignore b/.bazelignore deleted file mode 100644 index 3c3629e64..000000000 --- a/.bazelignore +++ /dev/null @@ -1 +0,0 @@ -node_modules diff --git a/.bazelrc b/.bazelrc deleted file mode 100644 index 09cb7d2fe..000000000 --- a/.bazelrc +++ /dev/null @@ -1,98 +0,0 @@ -# Common Bazel settings for JavaScript/NodeJS workspaces -# This rc file is automatically discovered when Bazel is run in this workspace, -# see https://docs.bazel.build/versions/master/guide.html#bazelrc -# -# The full list of Bazel options: https://docs.bazel.build/versions/master/command-line-reference.html - -# Cache action outputs on disk so they persist across output_base and bazel shutdown (eg. changing branches) -build --disk_cache=~/.cache/bazel-disk-cache - -# Make TypeScript and Angular compilation fast, by keeping a few copies of the -# compiler running as daemons, and cache SourceFile AST's to reduce parse time. -build --strategy=TypeScriptCompile=worker - -# Bazel will create symlinks from the workspace directory to output artifacts. -# Build results will be placed in a directory called "dist/bin" -# Other directories will be created like "dist/testlogs" -# Be aware that this will still create a bazel-out symlink in -# your project directory, which you must exclude from version control and your -# editor's search path. -build --symlink_prefix=dist/ -# To disable the symlinks altogether (including bazel-out) you can use -# build --symlink_prefix=/ -# however this makes it harder to find outputs. - -# Releases should always be stamped with version control info -# build --workspace_status_command=./tools/bazel_stamp_vars.sh - -# Specifies desired output mode for running tests. -# Valid values are -# 'summary' to output only test status summary -# 'errors' to also print test logs for failed tests -# 'all' to print logs for all tests -# 'streamed' to output logs for all tests in real time -# (this will force tests to be executed locally one at a time regardless of --test_strategy value). -test --test_output=all - -# Support for debugging NodeJS tests -# Add the Bazel option `--config=debug` to enable this -# --test_output=streamed -# Stream stdout/stderr output from each test in real-time. -# See https://docs.bazel.build/versions/master/user-manual.html#flag--test_output for more details. -# --test_strategy=exclusive -# Run one test at a time. -# --test_timeout=9999 -# Prevent long running tests from timing out -# See https://docs.bazel.build/versions/master/user-manual.html#flag--test_timeout for more details. -# --nocache_test_results -# Always run tests -# --node_options=--inspect-brk -# Pass the --inspect-brk option to all tests which enables the node inspector agent. -# See https://nodejs.org/de/docs/guides/debugging-getting-started/#command-line-options for more details. -# --define=VERBOSE_LOGS=1 -# Rules will output verbose logs if the VERBOSE_LOGS environment variable is set. `VERBOSE_LOGS` will be passed to -# `nodejs_binary` and `nodejs_test` via the default value of the `default_env_vars` attribute of those rules. -# --compilation_mode=dbg -# Rules may change their build outputs if the compilation mode is set to dbg. For example, -# mininfiers such as terser may make their output more human readable when this is set. `COMPILATION_MODE` will be passed to -# `nodejs_binary` and `nodejs_test` via the default value of the `default_env_vars` attribute of those rules. -# See https://docs.bazel.build/versions/master/user-manual.html#flag--compilation_mode for more details. 
-# test:debug --test_output=streamed --test_strategy=exclusive --test_timeout=9999 --nocache_test_results --define=VERBOSE_LOGS=1 - -# If we send debug config we can inspect the proccess in chrome -test:debug --test_arg=--node_options=--inspect-brk --test_output=streamed --test_strategy=exclusive --test_timeout=9999 --nocache_test_results - -# Use bazel run with `--config=debug` to turn on the NodeJS inspector agent. -# The node process will break before user code starts and wait for the debugger to connect. -run:debug --define=VERBOSE_LOGS=1 -- --node_options=--inspect-brk -# The following option will change the build output of certain rules such as terser and may not be desirable in all cases -build:debug --compilation_mode=dbg - -# Turn off legacy external runfiles -# This prevents accidentally depending on this feature, which Bazel will remove. -build --nolegacy_external_runfiles - -# Turn on the "Managed Directories" feature. -# This allows Bazel to share the same node_modules directory with other tools -# NB: this option was introduced in Bazel 0.26 -# See https://docs.bazel.build/versions/master/command-line-reference.html#flag--experimental_allow_incremental_repository_updates -common --experimental_allow_incremental_repository_updates - -# Turn on --incompatible_strict_action_env which was on by default -# in Bazel 0.21.0 but turned off again in 0.22.0. Follow -# https://github.com/bazelbuild/bazel/issues/7026 for more details. -# This flag is needed to so that the bazel cache is not invalidated -# when running bazel via `yarn bazel`. -# See https://github.com/angular/angular/issues/27514. -build --incompatible_strict_action_env -run --incompatible_strict_action_env - -# Load any settings specific to the current user. -# .bazelrc.user should appear in .gitignore so that settings are not shared with team members -# This needs to be last statement in this -# config, as the user configuration should be able to overwrite flags from this file. 
-# See https://docs.bazel.build/versions/master/best-practices.html#bazelrc -# (Note that we use .bazelrc.user so the file appears next to .bazelrc in directory listing, -# rather than user.bazelrc as suggested in the Bazel docs) -try-import %workspace%/.bazelrc.user - diff --git a/.ci/.gitignore b/.ci/.gitignore deleted file mode 100644 index 3c3629e64..000000000 --- a/.ci/.gitignore +++ /dev/null @@ -1 +0,0 @@ -node_modules diff --git a/.ci/Pulumi.website-catalysts-dev.yaml b/.ci/Pulumi.website-catalysts-dev.yaml deleted file mode 100644 index 887fa3aac..000000000 --- a/.ci/Pulumi.website-catalysts-dev.yaml +++ /dev/null @@ -1,3 +0,0 @@ -encryptionsalt: v1:9X3+G/+xJoE=:v1:oAp27x9lCRuwv+Vm:8LrLnqvlQNaLS7fo/e+AND3QURJovg== -config: - aws:region: us-east-1 diff --git a/.ci/Pulumi.yaml b/.ci/Pulumi.yaml deleted file mode 100644 index 0071783e1..000000000 --- a/.ci/Pulumi.yaml +++ /dev/null @@ -1,3 +0,0 @@ -name: website-catalysts -runtime: nodejs -description: Catalyst status diff --git a/.ci/index.ts b/.ci/index.ts deleted file mode 100644 index 3d501addd..000000000 --- a/.ci/index.ts +++ /dev/null @@ -1,17 +0,0 @@ -import { env, envTLD } from 'dcl-ops-lib/domain' -import { buildStatic } from 'dcl-ops-lib/buildStatic' -import { globalConfig } from 'dcl-ops-lib/values' - -const { defaultSecurityGroupName } = globalConfig[env] - -async function main() { - const builder = buildStatic({ - domain: `catalysts.decentraland.${envTLD}` - }) - - return { - cloudfrontDistribution: builder.cloudfrontDistribution, - bucketName: builder.contentBucket - } -} -export = main diff --git a/.ci/package.json b/.ci/package.json deleted file mode 100644 index 8ce9a765e..000000000 --- a/.ci/package.json +++ /dev/null @@ -1,14 +0,0 @@ -{ - "name": "website-catalysts", - "devDependencies": { - "@types/node": "^14.0.13" - }, - "dependencies": { - "@pulumi/aws": "^2.9.0", - "@pulumi/awsx": "^0.20.0", - "@pulumi/eks": "^0.19.2", - "@pulumi/kubernetes": "^2.3.0", - "@pulumi/pulumi": "^2.4.0", - "dcl-ops-lib": "2.0.10" - } -} diff --git a/.ci/tsconfig.json b/.ci/tsconfig.json deleted file mode 100644 index c97c97deb..000000000 --- a/.ci/tsconfig.json +++ /dev/null @@ -1,17 +0,0 @@ -{ - "compilerOptions": { - "strict": true, - "outDir": "bin", - "target": "es2016", - "module": "commonjs", - "moduleResolution": "node", - "esModuleInterop": true, - "sourceMap": true, - "experimentalDecorators": true, - "pretty": true, - "noFallthroughCasesInSwitch": true, - "noImplicitReturns": true, - "forceConsistentCasingInFileNames": true - }, - "include": ["*.ts", "**/*.ts"] -} diff --git a/.circleci/config.yml b/.circleci/config.yml index 5c21bcd10..823e074e7 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -1,20 +1,5 @@ version: 2.1 -commands: - update_node_version: - description: 'A very simple command to update the node version' - steps: - - run: - name: Link nvm - command: | - echo 'export NVM_DIR="/opt/circleci/.nvm"' >> $BASH_ENV - echo ' [ -s "$NVM_DIR/nvm.sh" ] && \. 
"$NVM_DIR/nvm.sh"' >> $BASH_ENV - - run: - name: Update node version - command: | - nvm install v14.16.1 - nvm alias default v14.16.1 - references: .workspace_root: &workspace_root /tmp/repo @@ -23,7 +8,7 @@ references: at: *workspace_root .image_client: &image_client - image: circleci/node:14-browsers + image: circleci/node:16-browsers .working_directory_root: &working_directory_root working_directory: *workspace_root @@ -63,17 +48,19 @@ jobs: # Download and cache dependencies - restore_cache: + name: Restore Yarn Package Cache keys: - - v1-dependencies-{{ checksum "package.json" }} - # fallback to using the latest cache if no exact match is found - - v1-dependencies- + - yarn-packages-{{ checksum "yarn.lock" }} - - run: yarn install + - run: + name: Install Dependencies + command: yarn --frozen-lockfile --cache-folder ~/.cache/yarn - save_cache: + name: Save Yarn Package Cache + key: yarn-packages-{{ checksum "yarn.lock" }} paths: - - node_modules - key: v1-dependencies-{{ checksum "package.json" }} + - ~/.cache/yarn # persist to workspace to use in downstream jobs - persist_to_workspace: @@ -93,77 +80,74 @@ jobs: - <<: *attach_root - run: name: Build lighthouse - command: npx bazel build //comms/lighthouse:server --test_output=all - - run: - name: Run tests - command: npx bazel test //comms/lighthouse:unit_test --test_output=all - - build-peer: - <<: *base_env - steps: - - <<: *attach_root + command: yarn workspace @catalyst/lighthouse-server build - run: name: Run tests - command: npx bazel test //comms/peer:config_test --test_output=all + command: yarn workspace @catalyst/lighthouse-server test + environment: + CI: 'true' build-content: - machine: - docker_layer_caching: true + docker: + - <<: *image_client + - image: circleci/postgres:12 + command: postgres -c max_connections=300 + environment: + POSTGRES_DB: postgres_test + POSTGRES_PASSWORD: '12345678' + POSTGRES_USER: postgres working_directory: *workspace_root steps: - <<: *attach_root - - update_node_version - run: name: Run unit tests - command: npx bazel test //content:unit_test --test_output=all + command: yarn workspace @catalyst/content-server test:unit + environment: + CI: 'true' - run: name: Run integration tests - command: npx bazel test //content:integration_test --test_output=all --test_timeout 600 + command: yarn workspace @catalyst/content-server test:integration + environment: + CI: 'true' build-lambdas: <<: *base_env steps: - <<: *attach_root + - run: + name: Build content + command: yarn workspace @catalyst/lambdas-server build - run: name: Run tests - command: npx bazel test //lambdas:unit_test --test_output=all + command: yarn workspace @catalyst/lambdas-server test build-commons: <<: *base_env steps: - <<: *attach_root - run: - name: Run servers tests - command: npx bazel test //commons/servers:unit_test --test_output=all - - run: - name: Run utils tests - command: npx bazel test //commons/utils:unit_test --test_output=all - - publish-peer: - <<: *base_env - steps: - - <<: *attach_root - - run: - name: Set up NPM access tokens - command: echo "//registry.npmjs.org/:_authToken=${NPM_TOKEN}" > ~/.npmrc + name: Build commons + command: yarn workspace @catalyst/commons build - run: - name: Publish peer - command: npx bazel run //comms/peer:package.publish + name: Run commons tests + command: yarn workspace @catalyst/commons test publish-docker: <<: *base_env docker: - - image: circleci/node:14-browsers + - image: circleci/node:16-browsers environment: &ENVIRONMENT DOCKER_IMAGE_NAME: decentraland/katalyst steps: - 
<<: *attach_root - setup_remote_docker: { docker_layer_caching: true, version: 20.10.2 } - - run: - name: Prepare to build Docker image - command: ./prepare_for_docker_image.sh - run: name: Build Docker image - command: docker build -t ${DOCKER_IMAGE_NAME}:${CIRCLE_SHA1} . + command: | + if echo "${CIRCLE_TAG}" | grep "^[0-9]\+\.[0-9]\+\.[0-9]\+$"; then + DOCKER_BUILDKIT=1 docker build -t ${DOCKER_IMAGE_NAME}:${CIRCLE_SHA1} --build-arg COMMIT_HASH=${CIRCLE_SHA1} --build-arg CATALYST_VERSION=${CIRCLE_TAG} . + else + DOCKER_BUILDKIT=1 docker build -t ${DOCKER_IMAGE_NAME}:${CIRCLE_SHA1} --build-arg COMMIT_HASH=${CIRCLE_SHA1} . + fi - run: name: Log into DockerHub command: docker login -u "${DOCKER_USER}" -p "${DOCKER_PASS}" @@ -189,18 +173,20 @@ jobs: publish-docker-without-tag: <<: *base_env docker: - - image: circleci/node:14-browsers + - image: circleci/node:16-browsers environment: &ENVIRONMENT DOCKER_IMAGE_NAME: decentraland/katalyst steps: - <<: *attach_root - setup_remote_docker: { docker_layer_caching: true, version: 20.10.2 } - - run: - name: Prepare to build Docker image - command: ./prepare_for_docker_image.sh - run: name: Build Docker image - command: docker build -t ${DOCKER_IMAGE_NAME}:${CIRCLE_SHA1} . + command: | + if echo "${CIRCLE_TAG}" | grep "^[0-9]\+\.[0-9]\+\.[0-9]\+$"; then + DOCKER_BUILDKIT=1 docker build -t ${DOCKER_IMAGE_NAME}:${CIRCLE_SHA1} --build-arg COMMIT_HASH=${CIRCLE_SHA1} --build-arg CATALYST_VERSION=${CIRCLE_TAG} . + else + DOCKER_BUILDKIT=1 docker build -t ${DOCKER_IMAGE_NAME}:${CIRCLE_SHA1} --build-arg COMMIT_HASH=${CIRCLE_SHA1} . + fi - run: name: Log into DockerHub command: docker login -u "${DOCKER_USER}" -p "${DOCKER_PASS}" @@ -224,10 +210,6 @@ workflows: - checkout # build packages - - build-peer: - <<: *all_branches_and_tags - requires: - - linter - build-lighthouse: <<: *all_branches_and_tags requires: @@ -239,27 +221,11 @@ workflows: - build-lambdas: <<: *all_branches_and_tags requires: - - linter + - checkout - build-commons: <<: *all_branches_and_tags requires: - - linter - - # publish peer library - - accept-publish-peer: - <<: *master_and_tags - type: approval - requires: - - build-peer - # build-peer is the only real dependency. 
But we wait to reduce possibilities of bugs - # in other pipelines - - build-lighthouse - - build-content - - build-lambdas - - publish-peer: - <<: *master_and_tags - requires: - - accept-publish-peer + - checkout # publish docker image in "master" and semver tags # master publishes to @next @@ -267,7 +233,6 @@ workflows: - publish-docker: <<: *master_and_tags requires: - - build-peer - build-lighthouse - build-content - build-lambdas @@ -277,7 +242,6 @@ workflows: <<: *testing_branches type: approval requires: - - build-peer - build-lighthouse - build-content - build-lambdas diff --git a/.dclignore b/.dclignore deleted file mode 100644 index 22c8b7945..000000000 --- a/.dclignore +++ /dev/null @@ -1,12 +0,0 @@ -.* -package.json -package-lock.json -yarn-lock.json -build.json -export -tsconfig.json -node_modules -*.ts -*.tsx -Dockerfile -dist \ No newline at end of file diff --git a/.dockerignore b/.dockerignore index f71ea0b5c..e01e7b222 100644 --- a/.dockerignore +++ b/.dockerignore @@ -1,9 +1,7 @@ dist -bazel-out .git .gitignore .vscode node_modules *.log tmpbin -comms/performance-test \ No newline at end of file diff --git a/.eslintignore b/.eslintignore index 5488e2c10..4ff835d13 100644 --- a/.eslintignore +++ b/.eslintignore @@ -1,8 +1,9 @@ # We need to skip that file as it makes eslint to hang -comms/peer/src/Peer.ts -status/**/* -comms/lighthouse/rollup.config.js -dist -bazel-out +dist/ +node_modules/ +**/dist **/node_modules tmpbin +content/test +lambdas/test +comms/lighthouse/test diff --git a/.eslintrc.json b/.eslintrc.json index ac452fb4c..6555e160d 100644 --- a/.eslintrc.json +++ b/.eslintrc.json @@ -1,19 +1,25 @@ { - "parser": "@typescript-eslint/parser", // Specifies the ESLint parser "parserOptions": { "ecmaVersion": 2020, // Allows for the parsing of modern ECMAScript features - "sourceType": "module" // Allows for the use of imports + "sourceType": "module", // Allows for the use of imports + "project": [ + // Specify it only for TypeScript files + "./contracts/tsconfig.json", + "./commons/tsconfig.json", + "./comms/lighthouse/tsconfig.json", + "./content/tsconfig.json", + "./lambdas/tsconfig.json" + ] }, "extends": [ "plugin:@typescript-eslint/recommended", // Uses the recommended rules from the @typescript-eslint/eslint-plugin - "prettier/@typescript-eslint", // Uses eslint-config-prettier to disable ESLint rules from @typescript-eslint/eslint-plugin that would conflict with prettier "plugin:prettier/recommended" // Enables eslint-plugin-prettier and eslint-config-prettier. This will display prettier errors as ESLint errors. Make sure this is always the last configuration in the extends array. - ], "rules": { // Place to specify ESLint rules. 
Can be used to overwrite rules specified from the extended configs "@typescript-eslint/no-inferrable-types": 0, "@typescript-eslint/no-empty-function": "off", - "@typescript-eslint/no-explicit-any": "off" + "@typescript-eslint/no-explicit-any": "off", + "@typescript-eslint/no-floating-promises": 2 } } diff --git a/.github/dependabot.yml b/.github/dependabot.yml new file mode 100644 index 000000000..c96011c0e --- /dev/null +++ b/.github/dependabot.yml @@ -0,0 +1,7 @@ +version: 2 +updates: +- package-ecosystem: npm + directory: "/" + schedule: + interval: daily + open-pull-requests-limit: 5 diff --git a/.gitignore b/.gitignore index 4eb29665e..4805b56a6 100644 --- a/.gitignore +++ b/.gitignore @@ -1,7 +1,6 @@ .env dist -bazel-out **/node_modules yarn-error.log **/package-lock.json @@ -10,4 +9,5 @@ local/certbot/conf/** local/nginx/conf.d/00-katalyst.conf tmpbin linked-peer-package -.husky +**/tsconfig.tsbuildinfo +storage_*/** diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml deleted file mode 100644 index 20b22d18c..000000000 --- a/.gitlab-ci.yml +++ /dev/null @@ -1,15 +0,0 @@ -image: decentraland/ci-node:12 - -build: - only: - - master - - staging - - release - script: - - cd status && yarn install && PUBLIC_URL=https://catalysts.decentraland.io yarn build && cd .. - - dcl-lock-sync - - cd .ci && npm install && dcl-up website-catalysts - - dcl-sync-release && cd .. - - dcl-upload status/build - - cd .ci && dcl-cache-invalidation - diff --git a/.husky/.gitignore b/.husky/.gitignore new file mode 100644 index 000000000..31354ec13 --- /dev/null +++ b/.husky/.gitignore @@ -0,0 +1 @@ +_ diff --git a/.husky/pre-commit b/.husky/pre-commit new file mode 100755 index 000000000..d2ae35e84 --- /dev/null +++ b/.husky/pre-commit @@ -0,0 +1,4 @@ +#!/bin/sh +. "$(dirname "$0")/_/husky.sh" + +yarn lint-staged diff --git a/.nvmrc b/.nvmrc index 6b17d228d..58a4133d9 100644 --- a/.nvmrc +++ b/.nvmrc @@ -1 +1 @@ -14.16.1 +16.13.0 diff --git a/.prettierignore b/.prettierignore index 95d818ae7..1934b7034 100644 --- a/.prettierignore +++ b/.prettierignore @@ -1,5 +1,4 @@ dist -bazel-out tmpbin linked-peer-package -**/*.json \ No newline at end of file +**/*.json diff --git a/.vscode/extensions.json b/.vscode/extensions.json new file mode 100644 index 000000000..5032034af --- /dev/null +++ b/.vscode/extensions.json @@ -0,0 +1,7 @@ +{ + "recommendations": [ + "dbaeumer.vscode-eslint", + "esbenp.prettier-vscode", + "hbenl.vscode-jasmine-test-adapter" + ] +} diff --git a/.vscode/launch.json b/.vscode/launch.json index 78d0a024f..378b006f4 100644 --- a/.vscode/launch.json +++ b/.vscode/launch.json @@ -4,6 +4,7 @@ // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387 "version": "0.2.0", "configurations": [ + { "type": "node", "request": "attach", @@ -23,7 +24,10 @@ "program": "${workspaceFolder}/lighthouse:server", "outFiles": [ "${workspaceFolder}/**/*.js" - ] + ], + "env": { + "CI": "true" + } } ] -} \ No newline at end of file +} diff --git a/.vscode/settings.json b/.vscode/settings.json index 4deaf691a..17934ee93 100644 --- a/.vscode/settings.json +++ b/.vscode/settings.json @@ -1,25 +1,33 @@ { - "files.exclude": { - "bazel*": true, - "node_modules": true, - "data*": true, - "dist/**/*": true - }, - "typescript.tsdk": "node_modules/typescript/lib", - "editor.formatOnSave": true, - "cSpell.words": [ - "Ethereum", - "comms" - ], - "editor.rulers": [120], - "editor.codeActionsOnSave": { - "source.fixAll.eslint": true, - "source.organizeImports": true - }, - "eslint.validate": [ - "typescript" - ], 
- "files.insertFinalNewline": true, - "files.trimTrailingWhitespace": true, - "files.trimFinalNewlines": true, + "files.exclude": { + "**/dist/": true, + "**/node_modules/": true + }, + "typescript.tsdk": "node_modules/typescript/lib", + "editor.formatOnSave": true, + "cSpell.words": [ + "comms", + "Ethereum", + "IPFS", + "Metaverse", + "openapi" + ], + "editor.rulers": [ + 120 + ], + "editor.codeActionsOnSave": { + "source.fixAll.eslint": true, + "source.organizeImports": true + }, + "eslint.validate": [ + "typescript" + ], + "files.insertFinalNewline": true, + "files.trimTrailingWhitespace": true, + "files.trimFinalNewlines": true, + "jasmineExplorer.nodeArgv": [ + "-r", + "ts-node/register" + ], + "jasmineExplorer.config": "jasmine.json" } diff --git a/BUILD.bazel b/BUILD.bazel deleted file mode 100644 index 9819940b0..000000000 --- a/BUILD.bazel +++ /dev/null @@ -1,33 +0,0 @@ -# Add rules here to build your software -# See https://docs.bazel.build/versions/master/build-ref.html#BUILD_files - -exports_files( - [ - "tsconfig.json", - "common.package.json", - ], - visibility = ["//visibility:public"], -) - -filegroup( - name = "node_modules", - srcs = glob( - include = [ - "node_modules/**/*.js", - "node_modules/**/*.d.ts", - "node_modules/**/*.json", - "node_modules/.bin/*", - ], - exclude = [ - # Files under test & docs may contain file names that - # are not legal Bazel labels (e.g., - # node_modules/ecstatic/test/public/中文/檔案.html) - "node_modules/**/test/**", - "node_modules/**/docs/**", - # Files with spaces in the name are not legal Bazel labels - "node_modules/**/* */**", - "node_modules/**/* *", - ], - ), - visibility = ["//visibility:public"], -) diff --git a/Dockerfile b/Dockerfile index d2a323f63..68fd7bfd3 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,33 +1,56 @@ -FROM node:14.16.1-slim - -RUN apt-get update && \ - apt-get upgrade -yq && \ - apt-get install -yq yarn git zlib1g zlib1g-dev - +FROM node:16-alpine as base WORKDIR /app +RUN apk add --no-cache bash -COPY entrypoint.sh . +COPY package.json . +COPY yarn.lock . +COPY comms/lighthouse/package.json comms/lighthouse/ +COPY commons/package.json commons/ +COPY contracts/package.json contracts/ +COPY content/package.json content/ +COPY lambdas/package.json lambdas/ -WORKDIR /app/build +# get production dependencies +FROM base as dependencies +RUN yarn install --prod --frozen-lockfile -COPY . . +# build sources +FROM base as catalyst-builder +RUN yarn install --frozen-lockfile -# The following are all collapsed to reduce image size -RUN yarn install &&\ - yarn bazel clean &&\ - yarn bazel build //comms/lighthouse:server &&\ - yarn bazel build //content:server &&\ - yarn bazel build //lambdas:server &&\ - cp -L -R dist/bin/ ../bin &&\ - yarn bazel clean --expunge && yarn cache clean &&\ - cd .. &&\ - rm -rf build &&\ - rm -rf /root/.cache/bazel +COPY . . +FROM catalyst-builder as comms-builder +RUN yarn workspace @catalyst/lighthouse-server build +FROM catalyst-builder as content-builder +RUN yarn workspace @catalyst/content-server build +FROM catalyst-builder as lambdas-builder +RUN yarn workspace @catalyst/lambdas-server build -WORKDIR /app +# build final image with transpiled code and runtime dependencies +FROM base -ENV COMMIT_HASH=bc34832282cfa746cfb1f27184cf3b53f321a164 -ENV CATALYST_VERSION=1.2.0 +COPY entrypoint.sh . 
+COPY --from=dependencies /app/node_modules ./node_modules/ +COPY --from=dependencies /app/commons/node_modules ./node_modules/ +COPY --from=dependencies /app/contracts/node_modules ./node_modules/ +COPY --from=dependencies /app/comms/lighthouse/node_modules ./node_modules/ +COPY --from=dependencies /app/content/node_modules ./node_modules/ +# uncomment this if lambdas eventually get some dependencies there +# COPY --from=dependencies /app/lambdas/node_modules ./node_modules/ + +COPY --from=content-builder /app/contracts/dist contracts/ +COPY --from=content-builder /app/commons/dist commons/ +COPY --from=content-builder /app/content/dist/src content/ +COPY --from=comms-builder /app/comms/lighthouse/dist/src comms/lighthouse/ +COPY --from=lambdas-builder /app/lambdas/dist/src lambdas/ + +# https://docs.docker.com/engine/reference/builder/#arg +ARG CATALYST_VERSION=3.0.0-ci +ENV CATALYST_VERSION=${CATALYST_VERSION:-3.0.0} + +# https://docs.docker.com/engine/reference/builder/#arg +ARG COMMIT_HASH=local +ENV COMMIT_HASH=${COMMIT_HASH:-local} EXPOSE 6969 EXPOSE 7070 diff --git a/status/LICENSE b/LICENSE similarity index 99% rename from status/LICENSE rename to LICENSE index 755956367..5d0eeae6c 100644 --- a/status/LICENSE +++ b/LICENSE @@ -178,7 +178,7 @@ APPENDIX: How to apply the Apache License to your work. To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "{}" + boilerplate notice, with the fields enclosed by brackets "[]" replaced with your own identifying information. (Don't include the brackets!) The text should be enclosed in the appropriate comment syntax for the file format. We also recommend that a @@ -186,7 +186,7 @@ same "printed page" as the copyright notice for easier identification within third-party archives. - Copyright 2020 dcl + Copyright 2021 Decentraland Foundation Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. @@ -199,4 +199,3 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. - diff --git a/README.md b/README.md index d1336f0bd..ee904513e 100644 --- a/README.md +++ b/README.md @@ -12,21 +12,35 @@ If you just want to run a Catalyst server, please check the [Catalyst Owner](htt - [Content Server](content) - [Lighthouse](comms) +- [Lambdas](lambdas) +- [PoW](https://github.com/decentraland/pow-authorization-server) -## Monitoring +## Catalyst API -For monitoring see [the following doc](docs/MONITORING.md) +This server implements v1 of the API Specification detailed [here](https://github.com/decentraland/catalyst-api-specs) -## Contributions +## Monitoring -If using Visual Studio, please download: +For monitoring see [the following doc](docs/MONITORING.md) -- [`prettier` extension](https://marketplace.visualstudio.com/items?itemName=esbenp.prettier-vscode) -- [`eslint` extension](https://marketplace.visualstudio.com/items?itemName=dbaeumer.vscode-eslint) +## Tests -### Install Husky ``` -yarn add husky -yarn husky install -yarn husky add .husky/pre-commit "yarn lint-staged" +yarn build +yarn test ``` + +## Contributing + +### [Code of Conduct](https://github.com/decentraland/catalyst/blob/master/docs/CODE_OF_CONDUCT.md) + +Please read [the full text](https://github.com/decentraland/catalyst/blob/master/docs/CODE_OF_CONDUCT.md) so that you can understand what actions will and will not be tolerated.
+ +### [Contributing Guide](https://github.com/decentraland/catalyst/blob/master/docs/CONTRIBUTING.md) + +Read our [contributing guide](https://github.com/decentraland/catalyst/blob/master/docs/CONTRIBUTING.md) to learn about our development process, how to propose bugfixes and improvements, and how to build and test your changes. + +## Release + +- Create a tag release in Git +- It will trigger the CI job which publishes a new docker image version under `@latest` tag diff --git a/WORKSPACE b/WORKSPACE deleted file mode 100644 index 96a08cd9a..000000000 --- a/WORKSPACE +++ /dev/null @@ -1,121 +0,0 @@ -# Bazel workspace created by @bazel/create 0.41.0 - -# Declares that this directory is the root of a Bazel workspace. -# See https://docs.bazel.build/versions/master/build-ref.html#workspace -workspace( - # How this workspace would be referenced with absolute labels from another workspace - name = "katalyst", - # Map the @npm bazel workspace to the node_modules directory. - # This lets Bazel use the same node_modules as other local tooling. - managed_directories = {"@npm": ["node_modules"]}, -) - -# Install the nodejs "bootstrap" package -# This provides the basic tools for running and packaging nodejs programs in Bazel -load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive") - -http_archive( - name = "build_bazel_rules_nodejs", - sha256 = "a54b2511d6dae42c1f7cdaeb08144ee2808193a088004fc3b464a04583d5aa2e", - urls = ["https://github.com/bazelbuild/rules_nodejs/releases/download/0.42.3/rules_nodejs-0.42.3.tar.gz"], -) - -# The yarn_install rule runs yarn anytime the package.json or yarn.lock file changes. -# It also extracts and installs any Bazel rules distributed in an npm package. -load("@build_bazel_rules_nodejs//:index.bzl", "node_repositories", "yarn_install") - -node_repositories( - node_repositories = { - "14.16.1-darwin_amd64": ("node-v14.16.1-darwin-x64.tar.gz", "node-v14.16.1-darwin-x64", "b762b72fc149629b7e394ea9b75a093cad709a9f2f71480942945d8da0fc1218"), - "14.16.1-linux_amd64": ("node-v14.16.1-linux-x64.tar.xz", "node-v14.16.1-linux-x64", "85a89d2f68855282c87851c882d4c4bbea4cd7f888f603722f0240a6e53d89df"), - "14.16.1-windows_amd64": ("node-v14.16.1-win-x64.zip", "node-v14.16.1-win-x64", "e469db37b4df74627842d809566c651042d86f0e6006688f0f5fe3532c6dfa41"), - }, - node_version = "14.16.1", -) - -yarn_install( - # Name this npm so that Bazel Label references look like @npm//package - name = "npm", - package_json = "//:package.json", - yarn_lock = "//:yarn.lock", -) - -# Install any Bazel rules which were extracted earlier by the yarn_install rule. 
-load("@npm//:install_bazel_dependencies.bzl", "install_bazel_dependencies") - -install_bazel_dependencies() - -# Set up TypeScript toolchain -load("@npm_bazel_typescript//:index.bzl", "ts_setup_workspace") - -ts_setup_workspace() - -# Fetch transitive Bazel dependencies of npm_bazel_karma -load("@npm_bazel_karma//:package.bzl", "npm_bazel_karma_dependencies") - -npm_bazel_karma_dependencies() - -# Set up web tests -http_archive( - name = "io_bazel_rules_webtesting", - sha256 = "9bb461d5ef08e850025480bab185fd269242d4e533bca75bfb748001ceb343c3", - urls = [ - "https://github.com/bazelbuild/rules_webtesting/releases/download/0.3.3/rules_webtesting.tar.gz", - ], -) - -load("@io_bazel_rules_webtesting//web:repositories.bzl", "web_test_repositories") -load("@io_bazel_rules_webtesting//web/versioned:browsers-0.3.2.bzl", "browser_repositories") - -web_test_repositories() - -browser_repositories( - chromium = True, -) - -# Set up necessary Go tools for web testing -http_archive( - name = "io_bazel_rules_go", - sha256 = "b9aa86ec08a292b97ec4591cf578e020b35f98e12173bbd4a921f84f583aebd9", - urls = [ - "https://storage.googleapis.com/bazel-mirror/github.com/bazelbuild/rules_go/releases/download/v0.20.2/rules_go-v0.20.2.tar.gz", - "https://github.com/bazelbuild/rules_go/releases/download/v0.20.2/rules_go-v0.20.2.tar.gz", - ], -) - -load("@io_bazel_rules_go//go:deps.bzl", "go_register_toolchains", "go_rules_dependencies") - -go_rules_dependencies() - -go_register_toolchains() - -#Protobuf - -http_archive( - name = "rules_proto", - sha256 = "602e7161d9195e50246177e7c55b2f39950a9cf7366f74ed5f22fd45750cd208", - strip_prefix = "rules_proto-97d8af4dc474595af3900dd85cb3a29ad28cc313", - urls = [ - "https://mirror.bazel.build/github.com/bazelbuild/rules_proto/archive/97d8af4dc474595af3900dd85cb3a29ad28cc313.tar.gz", - "https://github.com/bazelbuild/rules_proto/archive/97d8af4dc474595af3900dd85cb3a29ad28cc313.tar.gz", - ], -) - -load("@rules_proto//proto:repositories.bzl", "rules_proto_dependencies", "rules_proto_toolchains") - -rules_proto_dependencies() - -rules_proto_toolchains() - -http_archive( - name = "rules_typescript_proto", - sha256 = "0c76ae0d04eaa4d4c5f12556615cb70d294082ee672aee6dd849fea4ec2075ee", - strip_prefix = "rules_typescript_proto-0.0.3", - urls = [ - "https://github.com/Dig-Doug/rules_typescript_proto/archive/0.0.3.tar.gz", - ], -) - -load("@rules_typescript_proto//:index.bzl", "rules_typescript_proto_dependencies") - -rules_typescript_proto_dependencies() diff --git a/common.package.json b/common.package.json deleted file mode 100644 index 737933977..000000000 --- a/common.package.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "version": "$LH_VERSION", - "publishConfig": { - "access": "public" - } -} \ No newline at end of file diff --git a/commons/index.ts b/commons/index.ts new file mode 100644 index 000000000..dec451ad0 --- /dev/null +++ b/commons/index.ts @@ -0,0 +1,4 @@ +export * from './servers' +export * from './test-utils' +export * from './utils/Positions' +export * from './utils/util' diff --git a/commons/jasmine.json b/commons/jasmine.json new file mode 100644 index 000000000..d50dd98d8 --- /dev/null +++ b/commons/jasmine.json @@ -0,0 +1,5 @@ +{ + "helpers": ["test-utils/helpers.ts"], + "spec_dir": "", + "spec_files": ["**/*[sS]pec.ts"] +} diff --git a/commons/package.json b/commons/package.json new file mode 100644 index 000000000..7df1738d1 --- /dev/null +++ b/commons/package.json @@ -0,0 +1,38 @@ +{ + "name": "@catalyst/commons", + "description": "Commons", + "version": "0.1.0", + 
"private": true, + "main": "dist/index.js", + "types": "dist/index.d.ts", + "author": "Decentraland Contributors", + "license": "Apache-2.0", + "scripts": { + "cleanup": "shx rm -rf dist node_modules", + "build": "tsc -b", + "test": "jasmine-ts --config=jasmine.json" + }, + "dependencies": { + "@catalyst/contracts": "^0.1.0", + "@well-known-components/http-server": "^1.1.1", + "@well-known-components/interfaces": "^1.1.0", + "@well-known-components/metrics": "^1.1.2", + "dcl-crypto": "2.3.0", + "express": "4.17.1", + "jasmine-spec-reporter": "7.0.0", + "prom-client": "13.2.0", + "qs": "6.10.1", + "web3x": "4.0.6" + }, + "devDependencies": { + "@types/express": "4.17.13", + "@types/jasmine": "3.9.1", + "@types/node": "16.7.10", + "@types/qs": "^6", + "jasmine": "3.9.0", + "jasmine-core": "3.9.0", + "jasmine-ts": "0.4.0", + "ts-mockito": "2.6.1", + "ts-node": "10.2.1" + } +} diff --git a/commons/servers/BUILD.bazel b/commons/servers/BUILD.bazel deleted file mode 100644 index fd28eedf6..000000000 --- a/commons/servers/BUILD.bazel +++ /dev/null @@ -1,44 +0,0 @@ -load("@npm_bazel_typescript//:index.bzl", "ts_library") - -package(default_visibility = ["//visibility:public"]) - -ts_library( - name = "servers", - srcs = glob(["*.ts"]), - module_name = "decentraland-katalyst-commons", - tsconfig = "//:tsconfig.json", - deps = [ - "//contracts", - "@npm//@types", - "@npm//dcl-crypto", - "@npm//dcl-catalyst-commons", - "@npm//express", - "@npm//prom-client", - "@npm//response-time", - "@npm//web3x", - ], -) - -ts_library( - name = "tests_unit", - testonly = 1, - srcs = glob(["test/**/*.ts"]), - tsconfig = "//:tsconfig.json", - deps = [ - "//contracts", - ":servers", - "@npm//@types/jasmine", - "@npm//@types/node", - "@npm//ts-mockito", - ], -) - -load("@npm_bazel_jasmine//:index.bzl", "jasmine_node_test") - -jasmine_node_test( - name = "unit_test", - deps = [ - ":tests_unit", - "@npm//jasmine", - ], -) diff --git a/commons/servers/DAOClient.ts b/commons/servers/DAOClient.ts index 0f6f8f222..cbacb753a 100644 --- a/commons/servers/DAOClient.ts +++ b/commons/servers/DAOClient.ts @@ -1,4 +1,4 @@ -import { CatalystData, CatalystId, DAOContract } from 'decentraland-katalyst-contracts/DAOContract' +import { CatalystData, CatalystId, DAOContract } from '@catalyst/contracts' import { ServerMetadata } from './ServerMetadata' export interface DAOClient { @@ -16,7 +16,7 @@ export class DAOContractClient { async getAllContentServers(): Promise> { const servers: Set = await this.getAllServers() - return new Set(Array.from(servers.values()).map((server) => ({ ...server, address: server.address + '/content' }))) + return new Set(Array.from(servers.values()).map((server) => ({ ...server, baseUrl: server.baseUrl + '/content' }))) } async getAllServers(): Promise> { @@ -56,17 +56,17 @@ export class DAOContractClient { private toMetadata(data: CatalystData): ServerMetadata | undefined { const { id, owner, domain } = data - let address = domain.trim() + let baseUrl = domain.trim() - if (address.startsWith('http://')) { - console.warn(`Catalyst node domain using http protocol, skipping ${address}`) + if (baseUrl.startsWith('http://')) { + console.warn(`Catalyst node domain using http protocol, skipping ${baseUrl}`) return undefined } - if (!address.startsWith('https://')) { - address = 'https://' + address + if (!baseUrl.startsWith('https://')) { + baseUrl = 'https://' + baseUrl } - return { address, owner, id } + return { baseUrl, owner, id } } } diff --git a/commons/servers/ServerMetadata.ts 
b/commons/servers/ServerMetadata.ts index aa4fcee89..99b7a144d 100644 --- a/commons/servers/ServerMetadata.ts +++ b/commons/servers/ServerMetadata.ts @@ -1,6 +1,9 @@ import { EthAddress } from 'dcl-crypto' + export type ServerMetadata = { - address: string + baseUrl: string owner: EthAddress id: string } + +export declare type ServerBaseUrl = string diff --git a/commons/servers/asyncHandler.ts b/commons/servers/asyncHandler.ts new file mode 100644 index 000000000..ccfa1cdca --- /dev/null +++ b/commons/servers/asyncHandler.ts @@ -0,0 +1,12 @@ +import { NextFunction, Request, RequestHandler, Response } from 'express' + +export function asyncHandler( + handler: (req: Request, res: Response, next: NextFunction) => Promise +): RequestHandler { + return (req, res, next) => { + handler(req, res, next).catch((e) => { + console.error(`Unexpected error while performing request ${req.method} ${req.originalUrl}`, e) + res.status(500).send({ status: 'server-error', message: 'Unexpected error' }) + }) + } +} diff --git a/commons/servers/fsutils.ts b/commons/servers/fsutils.ts index 710629972..4c1a1cd42 100644 --- a/commons/servers/fsutils.ts +++ b/commons/servers/fsutils.ts @@ -2,7 +2,7 @@ import fs from 'fs' export async function existPath(path: string): Promise { try { - await fs.promises.access(path, fs.constants.F_OK | fs.constants.W_OK) + await fs.promises.access(path, fs.constants.F_OK | fs.constants.R_OK) return true } catch (error) { return false diff --git a/commons/servers/handlers.ts b/commons/servers/handlers.ts index 9e91f9b03..ecfd8250e 100644 --- a/commons/servers/handlers.ts +++ b/commons/servers/handlers.ts @@ -1,5 +1,5 @@ -import { Authenticator, EthAddress } from 'dcl-crypto' -import { NextFunction, Request, RequestHandler, Response } from 'express-serve-static-core' +import { Authenticator, AuthLink, EthAddress } from 'dcl-crypto' +import { NextFunction, Request, RequestHandler, Response } from 'express' import { EthereumProvider } from 'web3x/providers' import { SignatureValidator, SignerData, validateSignature } from './signatures' @@ -16,10 +16,10 @@ export function validateSignatureHandler( signerDataBuilder: (body: any) => SignerData = (b) => b, signatureValidator: SignatureValidator = Authenticator.validateSignature ): RequestHandler { - return async (req: Request, res: Response, next: NextFunction) => { + return (req: Request, res: Response, next: NextFunction) => { const signerData = signerDataBuilder(req.body) - await validateSignature( + validateSignature( signerData, `${messageToSignBuilder(req.body)}${signerData.timestamp}`, next, @@ -27,6 +27,76 @@ export function validateSignatureHandler( (signer) => authorizedSignerPredicate(signer, req.body), networkOrProvider, signatureValidator - ) + ).catch(next) } } +/** + * Validates signature using header parameters. 
This should work with Kernel's Signed Fetch + */ +export function validateSignatureFromHeaderHandler( + networkOrProvider: string | EthereumProvider, + authorizedSignerPredicate: (signer: EthAddress | undefined, body: any) => boolean = (_, __) => true, + signatureValidator: SignatureValidator = Authenticator.validateSignature +): RequestHandler { + return (req: Request, res: Response, next: NextFunction) => { + const [chain, timestamp, metadata] = buildAuthChainFromHeaders(req) + + if (!timestamp) { + res.status(401).send({ + status: 'unauthorized', + message: `Timestamp must be provided with the header ${AUTH_TIMESTAMP_HEADER}` + }) + return + } + + const signerData: SignerData = { + authChain: chain, + timestamp + } + + const payloadParts = [req.method.toLowerCase(), req.originalUrl.toLowerCase(), timestamp.toString(), metadata] + const signaturePayload = payloadParts.join(':').toLowerCase() + + validateSignature( + signerData, + signaturePayload, + () => { + req.params.address = chain[0].payload + req.params.authMetadata = metadata + next() + }, + (message) => res.status(401).send({ status: 'unauthorized', message }), + (signer) => authorizedSignerPredicate(signer, req.body), + networkOrProvider, + signatureValidator + ).catch(next) + } +} + +export const AUTH_CHAIN_HEADER_PREFIX = 'x-identity-auth-chain-' +export const AUTH_TIMESTAMP_HEADER = 'x-identity-timestamp' +export const AUTH_METADATA_HEADER = 'x-identity-metadata' + +// We support up to 10 links in authchain. +function getAuthChainHeaders() { + return [...new Array(10).keys()].map((idx) => `${AUTH_CHAIN_HEADER_PREFIX}${idx}`) +} + +export const authHeaders = [AUTH_METADATA_HEADER, AUTH_TIMESTAMP_HEADER, ...getAuthChainHeaders()] + +function extractIndex(header: string) { + return parseInt(header.substring(AUTH_CHAIN_HEADER_PREFIX.length), 10) +} + +function buildAuthChainFromHeaders(req: Request): [AuthLink[], number | undefined, string] { + const chain = Object.keys(req.headers) + .filter((header) => header.includes(AUTH_CHAIN_HEADER_PREFIX)) + .sort((a, b) => (extractIndex(a) > extractIndex(b) ? 1 : -1)) + .map((header) => JSON.parse(req.headers[header] as string) as AuthLink) + + const timestampString = req.header(AUTH_TIMESTAMP_HEADER) + const metadata = req.header(AUTH_METADATA_HEADER) + + const timestamp = timestampString ? parseInt(timestampString, 10) : undefined + return [chain, timestamp, metadata ?? 
''] +} diff --git a/commons/servers/index.ts b/commons/servers/index.ts new file mode 100644 index 000000000..bd2f74d12 --- /dev/null +++ b/commons/servers/index.ts @@ -0,0 +1,9 @@ +export * from './addresses' +export * from './asyncHandler' +export * from './DAOClient' +export * from './fsutils' +export * from './handlers' +export * from './metrics' +export * from './QueryParameters' +export * from './ServerMetadata' +export * from './signatures' diff --git a/commons/servers/metrics.ts b/commons/servers/metrics.ts index 640c6ca90..37b70c4d2 100644 --- a/commons/servers/metrics.ts +++ b/commons/servers/metrics.ts @@ -1,60 +1,100 @@ -import express, { RequestHandler } from 'express' -import { collectDefaultMetrics, Counter, register as Register, Summary } from 'prom-client' -import ResponseTime from 'response-time' - -const pathsTaken = new Counter({ - name: 'paths_taken', - help: 'Paths taken in the app', - labelNames: ['path'] -}) - -const responses = new Summary({ - name: 'http_responses', - help: 'Response time in milliseconds', - labelNames: ['method', 'path', 'status'] -}) - -const numRequests = new Counter({ - name: 'num_requests', - help: 'Number of requests made', - labelNames: ['method'] -}) - -const port = parseInt(process.env.METRICS_PORT ?? '9090') -let requestMetricsHandlers: RequestHandler[] = [] - -export class Metrics { - static initialize() { - const metricsServer = express() - this.injectMetricsRoute(metricsServer) - this.startCollection(metricsServer) - requestMetricsHandlers = [this.requestCounters, this.responseCounters] +import { IMetricsComponent } from '@well-known-components/interfaces' +import { HttpMetrics } from '@well-known-components/metrics/dist/http' +import express from 'express' +import { collectDefaultMetrics, Registry } from 'prom-client' + +// Due to security reasons, metrics have their own endpoint and server +export function initializeMetricsServer( + serverToInstrument: express.Express, + metricsComponent: IMetricsComponent & { + register: Registry } +) { + const metricsExpressApp = express() + + const register = metricsComponent.register + + addMetricsEndpointToServer(metricsExpressApp, register) - static requestHandlers(): RequestHandler[] { - return requestMetricsHandlers + // due to the hardcoded nature of our "global instance of metricsComponent" + // running tests and registering default metrics twice breaks the execution + // that way we disable the default metrics for CI environments + if (process.env.CI !== 'true' && process.env.RUNNING_TESTS !== 'true') { + collectDefaultMetrics({ register }) } - private static requestCounters = function (req, _res, next) { - numRequests.inc({ method: req.method }) - pathsTaken.inc({ path: req.baseUrl + req.route.path }) - next() + installMetricsMiddlewares(serverToInstrument, metricsComponent) + + let server: { close(cb: (err?: any) => void): void } | undefined + + return { + async start(port?: number) { + const usedPort = port === undefined ? parseInt(process.env.METRICS_PORT ?? 
'9090') : port + if (isNaN(usedPort)) { + throw new Error('Invalid non-numeric METRICS_PORT') + } + console.log(`Starting the collection of metrics, the metrics are available on :${usedPort}/metrics`) + + server = metricsExpressApp.listen(usedPort) + }, + async stop() { + await new Promise((resolve, reject) => { + if (server) { + server!.close((error) => { + if (error) { + reject(error) + } else { + resolve() + } + }) + server = undefined + } + }) + } } +} - private static responseCounters = ResponseTime(function (req, res, time) { - responses.labels(req.method, req.baseUrl + req.route.path, res.statusCode).observe(time) +function addMetricsEndpointToServer(app: express.Express, registry: Registry) { + app.get('/metrics', (_req: express.Request, res: express.Response) => { + registry + .metrics() + .then(($) => { + res.setHeader('content-type', registry.contentType) + res.send($) + }) + .catch((err) => { + console.error(err) + res.status(500).end() + }) }) +} - private static injectMetricsRoute(app: express.Express) { - app.get('/metrics', async (req, res) => { - res.set('Content-Type', Register.contentType) - res.end(await Register.metrics()) +// TODO: once stable, move into well-known-components/metrics/express-helpers +function installMetricsMiddlewares(app: express.Express, metricsComponent: IMetricsComponent) { + app.use(function metricsMiddleware(req, res, next) { + const labels = { + method: req.method, + handler: '', + code: 200 + } + + const { end } = metricsComponent.startTimer('http_request_duration_seconds', labels) + + res.on('finish', () => { + labels.code = (res && res.statusCode) || labels.code + + if (req.route && req.route.path) { + labels.handler = (req.baseUrl || '') + req.route.path + } + + const contentLength = res.getHeader('content-length') + if (typeof contentLength === 'string') { + metricsComponent.observe('http_request_size_bytes', labels, parseInt(contentLength, 10)) + } + metricsComponent.increment('http_requests_total', labels) + end(labels) }) - } - private static startCollection(app: express.Express) { - console.log(`Starting the collection of metrics, the metrics are available on :${port}/metrics`) - collectDefaultMetrics() - app.listen(port) - } + next() + }) } diff --git a/commons/servers/signatures.ts b/commons/servers/signatures.ts index 31adb7983..77ca60bd3 100644 --- a/commons/servers/signatures.ts +++ b/commons/servers/signatures.ts @@ -1,5 +1,5 @@ +import { httpProviderForNetwork } from '@catalyst/contracts' import { AuthChain, Authenticator, EthAddress, Signature, ValidationResult } from 'dcl-crypto' -import { httpProviderForNetwork } from 'decentraland-katalyst-contracts/utils' import { EthereumProvider } from 'web3x/providers' // We want all signatures to be "current". 
We consider "current" to be the current time, diff --git a/commons/servers/synchronizationState.ts b/commons/servers/synchronizationState.ts deleted file mode 100644 index 7f2c20a9c..000000000 --- a/commons/servers/synchronizationState.ts +++ /dev/null @@ -1,6 +0,0 @@ -export enum SynchronizationState { - BOOTSTRAPPING = 'Bootstrapping', - SYNCED = 'Synced', - SYNCING = 'Syncing', - FAILED_TO_SYNC = 'Failed to sync' -} diff --git a/commons/servers/test/DAOContractClient.spec.ts b/commons/servers/test/DAOContractClient.spec.ts index 5020d5458..8988a5c54 100644 --- a/commons/servers/test/DAOContractClient.spec.ts +++ b/commons/servers/test/DAOContractClient.spec.ts @@ -1,16 +1,16 @@ -import { DAOContractClient } from 'decentraland-katalyst-commons/DAOClient' -import { ServerMetadata } from 'decentraland-katalyst-commons/ServerMetadata' -import { CatalystData, CatalystId, DAOContract } from 'decentraland-katalyst-contracts/DAOContract' +import { CatalystData, CatalystId, DAOContract } from '@catalyst/contracts' import { anyNumber, anyString, instance, mock, verify, when } from 'ts-mockito' +import { DAOContractClient } from '../DAOClient' +import { ServerMetadata } from '../ServerMetadata' describe('DAOContractClient', () => { const id1: CatalystId = 'id1' const data1: CatalystData = { id: id1, owner: 'owner1', domain: 'domain.com' } - const metadata1: ServerMetadata = { id: id1, owner: 'owner1', address: 'https://domain.com' } + const metadata1: ServerMetadata = { id: id1, owner: 'owner1', baseUrl: 'https://domain.com' } const id2: CatalystId = 'id2' const data2: CatalystData = { id: id2, owner: 'owner2', domain: 'domain.com' } - const metadata2: ServerMetadata = { id: id2, owner: 'owner2', address: 'https://domain.com' } + const metadata2: ServerMetadata = { id: id2, owner: 'owner2', baseUrl: 'https://domain.com' } it(`When server was added, then changes are detected and reported`, async () => { const [, contractInstance] = contractWith([ @@ -83,10 +83,10 @@ describe('DAOContractClient', () => { const servers = await client.getAllServers() expect(servers.size).toEqual(1) - const { id, owner, address } = servers.values().next().value + const { id, owner, baseUrl } = servers.values().next().value expect(id).toEqual(id1) expect(owner).toEqual('owner') - expect(address).toEqual('https://domain.com') + expect(baseUrl).toEqual('https://domain.com') }) function contractWith(servers: [CatalystId, CatalystData][]): [DAOContract, DAOContract] { diff --git a/commons/servers/test/handlers.spec.ts b/commons/servers/test/handlers.spec.ts index 37af5673c..a6b4184aa 100644 --- a/commons/servers/test/handlers.spec.ts +++ b/commons/servers/test/handlers.spec.ts @@ -1,4 +1,4 @@ -import { validateSignatureHandler } from 'decentraland-katalyst-commons/handlers' +import { validateSignatureHandler } from '../handlers' describe('validate signature handler', () => { let request: any diff --git a/commons/test-utils/helpers.ts b/commons/test-utils/helpers.ts new file mode 100644 index 000000000..dcc35a435 --- /dev/null +++ b/commons/test-utils/helpers.ts @@ -0,0 +1,3 @@ +import { installReporter } from '.' 
+ +installReporter() diff --git a/commons/test-utils/index.ts b/commons/test-utils/index.ts new file mode 100644 index 000000000..e782d251c --- /dev/null +++ b/commons/test-utils/index.ts @@ -0,0 +1,25 @@ +import { delay } from '../utils/util' + +export async function whileTrue( + condition: () => boolean, + messageIfFailed: string = 'no message specified', + timeout: number = 1000 +) { + const started = Date.now() + while (condition()) { + if (Date.now() - started > timeout) { + throw new Error('Timed out awaiting condition: ' + messageIfFailed) + } + await delay(5) + } +} + +export async function untilTrue( + condition: () => boolean, + messageIfFailed: string = 'no message specified', + timeout: number = 1000 +) { + await whileTrue(() => !condition(), messageIfFailed, timeout) +} + +export * from './reporter' diff --git a/commons/test-utils/reporter.ts b/commons/test-utils/reporter.ts new file mode 100644 index 000000000..3f8e0019f --- /dev/null +++ b/commons/test-utils/reporter.ts @@ -0,0 +1,17 @@ +import { SpecReporter, StacktraceOption } from 'jasmine-spec-reporter' + +export const reporter = new SpecReporter({ + summary: { + displayDuration: true, + displayStacktrace: StacktraceOption.PRETTY + }, + spec: { + displayStacktrace: StacktraceOption.RAW, + displayDuration: true + } +}) + +export const installReporter = (): void => { + jasmine.getEnv().clearReporters() + jasmine.getEnv().addReporter(reporter) +} diff --git a/commons/tsconfig.json b/commons/tsconfig.json new file mode 100644 index 000000000..d1ea420aa --- /dev/null +++ b/commons/tsconfig.json @@ -0,0 +1,11 @@ +{ + "extends": "../tsconfig.json", + "compilerOptions": { + "outDir": "./dist", + "rootDir": "." + }, + "include": ["./index.ts", "servers", "test-utils", "utils"], + "references": [ + { "path": "../contracts" } + ] +} diff --git a/commons/utils/BUILD.bazel b/commons/utils/BUILD.bazel deleted file mode 100644 index ac8208139..000000000 --- a/commons/utils/BUILD.bazel +++ /dev/null @@ -1,31 +0,0 @@ -load("@npm_bazel_typescript//:index.bzl", "ts_library") -load("@npm_bazel_jasmine//:index.bzl", "jasmine_node_test") - -package(default_visibility = ["//visibility:public"]) - -ts_library( - name = "utils", - srcs = glob(["*.ts"]), - module_name = "decentraland-katalyst-utils", - tsconfig = "//:tsconfig.json", -) - - -ts_library( - name = "tests", - testonly = 1, - srcs = glob(["test/**/*.ts"]), - tsconfig = "//:tsconfig.json", - deps = [ - ":utils", - "@npm//@types/jasmine", - ], -) - -jasmine_node_test( - name = "unit_test", - deps = [ - ":tests", - "@npm//jasmine", - ], -) \ No newline at end of file diff --git a/commons/utils/Positions.ts b/commons/utils/Positions.ts index 6d9bc989c..a1101c4fa 100644 --- a/commons/utils/Positions.ts +++ b/commons/utils/Positions.ts @@ -1,19 +1,8 @@ export const DISCRETIZE_POSITION_INTERVALS = [32, 64, 80, 128, 160] -export type Position3D = [number, number, number] -export type Position2D = [number, number] - export type Quaternion = [number, number, number, number] -export type Position = Position2D | Position3D - -export function isPosition3D(position: any): position is Position3D { - return position instanceof Array && position.length === 3 -} - -export function isPosition2D(position: any): position is Position2D { - return position instanceof Array && position.length === 2 -} +export type Position3D = [number, number, number] /** * Calculates the discretized distance between position a and position b, using the provided intervals (DISCRETIZE_POSITION_INTERVALS as default) @@ -28,20 
+17,16 @@ export function isPosition2D(position: any): position is Position2D { * * The @param intervals provided should be ordered from lower to greater */ -export function discretizedPositionDistance(intervals: number[] = DISCRETIZE_POSITION_INTERVALS) { - return (a: Position, b: Position) => { +export function discretizedPositionDistanceXZ(intervals: number[] = DISCRETIZE_POSITION_INTERVALS) { + return (a: Position3D, b: Position3D) => { let dx = 0 - let dy = 0 let dz = 0 dx = a[0] - b[0] - dy = a[1] - b[1] - if (isPosition3D(a) && isPosition3D(b)) { - dz = a[2] - b[2] - } + dz = a[2] - b[2] - const squaredDistance = dx * dx + dy * dy + dz * dz + const squaredDistance = dx * dx + dz * dz const intervalIndex = intervals.findIndex((it) => squaredDistance <= it * it) @@ -52,5 +37,5 @@ export function discretizedPositionDistance(intervals: number[] = DISCRETIZE_POS export type PeerConnectionHint = { id: string distance: number - position: Position + position: Position3D } diff --git a/commons/utils/test/Positions.spec.ts b/commons/utils/test/Positions.spec.ts index bf29ae017..7d5f4aec3 100644 --- a/commons/utils/test/Positions.spec.ts +++ b/commons/utils/test/Positions.spec.ts @@ -1,4 +1,4 @@ -import { discretizedPositionDistance, Position3D } from 'decentraland-katalyst-utils/Positions' +import { discretizedPositionDistanceXZ, Position3D } from '../Positions' describe('Discretize Positions', () => { it('should convert close positions to equivalent', () => { @@ -6,7 +6,9 @@ describe('Discretize Positions', () => { const position2: Position3D = [10, 10, 10] const origin: Position3D = [0, 0, 0] - expect(discretizedPositionDistance()(origin, position1)).toEqual(discretizedPositionDistance()(origin, position2)) + expect(discretizedPositionDistanceXZ()(origin, position1)).toEqual( + discretizedPositionDistanceXZ()(origin, position2) + ) }) it('should preserve higher distances when they are in different intervals', () => { @@ -14,8 +16,8 @@ describe('Discretize Positions', () => { const position2: Position3D = [90, 90, 90] const origin: Position3D = [0, 0, 0] - expect(discretizedPositionDistance()(origin, position1)).toBeLessThan( - discretizedPositionDistance()(origin, position2) + expect(discretizedPositionDistanceXZ()(origin, position1)).toBeLessThan( + discretizedPositionDistanceXZ()(origin, position2) ) }) @@ -24,19 +26,21 @@ describe('Discretize Positions', () => { const position2: Position3D = [44400, 44004, 44444] const origin: Position3D = [0, 0, 0] - expect(discretizedPositionDistance()(origin, position1)).toEqual(discretizedPositionDistance()(origin, position2)) + expect(discretizedPositionDistanceXZ()(origin, position1)).toEqual( + discretizedPositionDistanceXZ()(origin, position2) + ) }) it('should calculate according to the intervals', () => { const origin: Position3D = [0, 0, 0] - expect(discretizedPositionDistance()(origin, [20, 0, 0])).toEqual(0) - expect(discretizedPositionDistance()(origin, [0, 32, 0])).toEqual(0) - expect(discretizedPositionDistance()(origin, [0, 0, 48])).toEqual(1) - expect(discretizedPositionDistance()(origin, [30, 30, 30])).toEqual(1) - expect(discretizedPositionDistance()(origin, [72, 0, 0])).toEqual(2) - expect(discretizedPositionDistance()(origin, [60, 60, 60])).toEqual(3) - expect(discretizedPositionDistance()(origin, [90, 90, 90])).toEqual(4) - expect(discretizedPositionDistance()(origin, [150, 150, 150])).toEqual(5) + expect(discretizedPositionDistanceXZ()(origin, [20, 0, 0])).toEqual(0) + expect(discretizedPositionDistanceXZ()(origin, [0, 0, 
0])).toEqual(0) + expect(discretizedPositionDistanceXZ()(origin, [0, 0, 48])).toEqual(1) + expect(discretizedPositionDistanceXZ()(origin, [30, 0, 30])).toEqual(1) + expect(discretizedPositionDistanceXZ()(origin, [72, 0, 0])).toEqual(2) + expect(discretizedPositionDistanceXZ()(origin, [60, 0, 60])).toEqual(3) + expect(discretizedPositionDistanceXZ()(origin, [95, 0, 95])).toEqual(4) + expect(discretizedPositionDistanceXZ()(origin, [150, 0, 150])).toEqual(5) }) }) diff --git a/comms/README.md b/comms/README.md deleted file mode 100644 index b7dde803c..000000000 --- a/comms/README.md +++ /dev/null @@ -1,47 +0,0 @@ -# Lighthouse / Comms Server -Comms is comprised of two distinct modules: One for Catalyst server called "Lighthouse" and other for clients & P2P network called "Peer Library". - -The Lighthouse is an server that tracks peers in different "layers" and "rooms" in order to enable them to make the necessary connections. It also handles authentication. - -The following docs show how to run a local instance of the Lighthouse, and how to run a simple react app that serves to test the local lighthouse. - -## Set up - -* Install libs - - `yarn install` - -* Set up a lighthouse instance on localhost:9000 - - `yarn bazel run comms/lighthouse:server` - -* Set up a client server on localhost:3001 - - `yarn bazel run comms/peer-react-app:devserver` - -* Open the client - - `open localhost:3001` - -## Lighthouse endpoints - -Try the following endpoints for a minimal monitoring of the rooms state. - -* List of rooms - - `curl localhost:9000/rooms` - -* List of rooms joined curreently by user - - `curl localhost:9000/rooms\?userId=${USER_ID}` -* Join user to room - - `curl -X PUT localhost:9000/rooms/${ROOM_ID} -d '{ "id": "${USER_ID}" }' -H "Content-Type: application/json"` - -* Leave user from room - - `curl -X DELETE localhost:9000/rooms/${ROOM_ID}/users/${USER_ID}` - -* List of users in room - - `curl localhost:9000/rooms/${ROOM_ID}` diff --git a/comms/lighthouse/.gitignore b/comms/lighthouse/.gitignore new file mode 100644 index 000000000..716d9a1e1 --- /dev/null +++ b/comms/lighthouse/.gitignore @@ -0,0 +1 @@ +serverStorage.json diff --git a/comms/lighthouse/BUILD.bazel b/comms/lighthouse/BUILD.bazel deleted file mode 100644 index 3f21fed99..000000000 --- a/comms/lighthouse/BUILD.bazel +++ /dev/null @@ -1,127 +0,0 @@ -load("@npm_bazel_typescript//:index.bzl", "ts_library") -load("@build_bazel_rules_nodejs//:defs.bzl", "nodejs_binary") -load("//tools/npm:package.bzl", "dataform_npm_package") -load("@npm_bazel_jasmine//:index.bzl", "jasmine_node_test") -load("@npm_bazel_rollup//:index.bzl", "rollup_bundle") - -package(default_visibility = ["//visibility:public"]) - -filegroup( - name = "static", - srcs = glob( - include = [ - "static/**/*", - ], - exclude = [ - "static/**/*.ts", - "static/**/*.tsx", - ], - ), -) - -ts_library( - name = "monitor", - srcs = glob([ - "static/monitor/src/**/*.ts", - "static/monitor/src/**/*.tsx", - ]), - module_name = "lighthouse-monitor", - deps = [ - "@npm//@types/react", - "@npm//@types/react-dom", - "@npm//react", - "@npm//react-dom", - "@npm//viz.js", - ], -) - -# naming this rule this way to include the resulting bundle in the desired directory 'static/monitor/src' as said in https://github.com/bazelbuild/rules_nodejs/issues/1379 -rollup_bundle( - name = "static/monitor/src/bundle", - srcs = [ - "@npm//react", - "@npm//react-dom", - ], - config_file = ":rollup.config.js", - entry_point = "static/monitor/src/monitor.tsx", - format = "umd", - output_dir = False, 
- sourcemap = "false", - deps = [ - ":monitor", - "@npm//@rollup/plugin-commonjs", - "@npm//@rollup/plugin-node-resolve", - "@npm//@wessberg/rollup-plugin-ts", - "@npm//rollup-plugin-json", - "@npm//rollup-plugin-node-globals", - ], -) - -ts_library( - name = "lighthouse", - srcs = glob(["src/**/*.ts"]), - module_name = "@katalyst/lighthouse", - deps = [ - "//commons/servers", - "//commons/utils", - "//contracts", - "@npm//@types", - "@npm//cors", - "@npm//dcl-crypto", - "@npm//express", - "@npm//fast-deep-equal", - "@npm//fp-future", - "@npm//isomorphic-fetch", - "@npm//morgan", - "@npm//peerjs-server", - "@npm//prom-client", - "@npm//web3x", - "@npm//wrtc", - "@npm//ws", - ], -) - -nodejs_binary( - name = "server", - data = [ - ":lighthouse", - ":static", - ":static/monitor/src/bundle", - "@npm//@bazel/typescript", - "@npm//typescript", - ], - entry_point = "src/server.ts", -) - -dataform_npm_package( - name = "package", - package_layers = [ - "//:common.package.json", - "lighthouse.package.json", - ], - deps = [], -) - -ts_library( - name = "tests", - testonly = 1, - srcs = glob(["test/**/*.ts"]), - tsconfig = "//:tsconfig.json", - deps = [ - ":lighthouse", - "//commons/servers", - "//commons/utils", - "@npm//@types/express", - "@npm//@types/jasmine", - "@npm//isomorphic-fetch", - "@npm//express", - ], -) - -jasmine_node_test( - name = "unit_test", - deps = [ - ":tests", - "@npm//jasmine", - ], -) diff --git a/comms/lighthouse/README.md b/comms/lighthouse/README.md new file mode 100644 index 000000000..139a9291e --- /dev/null +++ b/comms/lighthouse/README.md @@ -0,0 +1,36 @@ +# Lighthouse / Comms Server + +Comms is comprised of two distinct modules: One for Catalyst server called "Lighthouse" and other for clients & P2P network called "Peer Library". + +The Lighthouse is a server that tracks peers in different positions in order to enable them to make the necessary connections. It also handles authentication. + +The following docs show how to run a local instance of the Lighthouse. 
+ +More information about the Peer Library component is available in this repository: https://github.com/decentraland/catalyst-comms-peer + +## Run tests + +``` +yarn build +yarn test +``` + +## Set up + +- Install libs + + `yarn install` + +- Build package + + `yarn build` + +- Set up a lighthouse instance on localhost:9000 + + `yarn start` + +## Lighthouse endpoints + +- Status + + `curl localhost:9000/status` diff --git a/comms/lighthouse/jasmine.json b/comms/lighthouse/jasmine.json new file mode 100644 index 000000000..8a1e63f6b --- /dev/null +++ b/comms/lighthouse/jasmine.json @@ -0,0 +1,6 @@ +{ + "spec_dir": "test", + "spec_files": ["**/*[sS]pec.ts"], + "random": false, + "helpers": ["helpers/reporter.ts"] +} diff --git a/comms/lighthouse/lighthouse.package.json b/comms/lighthouse/lighthouse.package.json deleted file mode 100644 index 08280a9f4..000000000 --- a/comms/lighthouse/lighthouse.package.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "name": "@katalyst/lighthouse-server", - "description": "Lighthouse server", - "dependencies": { - } -} \ No newline at end of file diff --git a/comms/lighthouse/package.json b/comms/lighthouse/package.json new file mode 100644 index 000000000..c3223caab --- /dev/null +++ b/comms/lighthouse/package.json @@ -0,0 +1,31 @@ +{ + "name": "@catalyst/lighthouse-server", + "description": "Lighthouse server", + "version": "0.1.0", + "author": "Decentraland Contributors", + "license": "Apache-2.0", + "scripts": { + "cleanup": "shx rm -rf dist node_modules", + "build": "tsc -b", + "start": "node ./dist/src/server.js", + "test": "jasmine-ts --config=jasmine.json" + }, + "dependencies": { + "@catalyst/commons": "0.1.0", + "@catalyst/contracts": "0.1.0", + "@dcl/archipelago": "1.0.0", + "@dcl/catalyst-api-specs": "1.0.1", + "cors": "2.8.5", + "dcl-catalyst-commons": "7.1.3", + "dcl-crypto": "2.3.0", + "express": "4.17.1", + "express-openapi-validator": "4.13.1", + "fast-deep-equal": "3.1.3", + "fp-future": "1.0.1", + "isomorphic-fetch": "3.0.0", + "morgan": "1.10.0", + "ms": "2.1.3", + "uuid": "8.3.2", + "ws": "8.2.1" + } +} diff --git a/comms/lighthouse/rollup.config.js b/comms/lighthouse/rollup.config.js deleted file mode 100644 index 574d08560..000000000 --- a/comms/lighthouse/rollup.config.js +++ /dev/null @@ -1,30 +0,0 @@ -import commonjs from '@rollup/plugin-commonjs' -import npm from '@rollup/plugin-node-resolve' -import react from 'react' -import reactDom from 'react-dom' -import globals from 'rollup-plugin-node-globals' - -/* eslint-disable @typescript-eslint/no-var-requires */ -const json = require('rollup-plugin-json') - -const allExternals = [] - -export default { - external: allExternals, - output: { - name: 'bundle' - }, - context: 'this', - plugins: [ - json(), - npm({ preferBuiltins: true, browser: true }), - commonjs({ - browser: true, - namedExports: { - react: Object.keys(react), - 'react-dom': Object.keys(reactDom) - } - }), - globals() - ] -} diff --git a/comms/lighthouse/serverStorage.json b/comms/lighthouse/serverStorage.json new file mode 100644 index 000000000..9e26dfeeb --- /dev/null +++ b/comms/lighthouse/serverStorage.json @@ -0,0 +1 @@ +{} \ No newline at end of file diff --git a/comms/lighthouse/src/config/configService.ts b/comms/lighthouse/src/config/configService.ts new file mode 100644 index 000000000..06ac24956 --- /dev/null +++ b/comms/lighthouse/src/config/configService.ts @@ -0,0 +1,187 @@ +import { fetchJson } from 'dcl-catalyst-commons' +import ms from 'ms' +import { PeerParameters } from '../types' +import { ISimpleStorage } 
from './simpleStorage' + +export type ConfigKeyValue = { + key: string + value?: any +} + +export class LighthouseConfig { + static readonly MAX_PEERS_PER_ISLAND: LighthouseConfig = new LighthouseConfig({ + name: 'maxPeersPerIsland', + fromText: parseInt, + defaultValue: 100 + }) + + static readonly ARCHIPELAGO_JOIN_DISTANCE: LighthouseConfig = new LighthouseConfig({ + name: 'archipelagoJoinDistance', + fromText: parseInt, + defaultValue: 64 + }) + + static readonly ARCHIPELAGO_LEAVE_DISTANCE: LighthouseConfig = new LighthouseConfig({ + name: 'archipelagoLeaveDistance', + fromText: parseInt, + defaultValue: 80 + }) + + static readonly ARCHIPELAGO_FLUSH_FREQUENCY: LighthouseConfig = new LighthouseConfig({ + name: 'archipelagoFlushFrequency', + fromText: parseFloat, + defaultValue: 2.0 + }) + + static readonly HIGH_LOAD_PEERS_COUNT: LighthouseConfig = new LighthouseConfig({ + name: 'highLoadPeersCount', + fromText: parseInt, + defaultValue: 10000 + }) + + static readonly MAX_CONCURRENT_USERS: LighthouseConfig = new LighthouseConfig({ + name: 'maxConcurrentUsers', + fromText: parseInt, + defaultValue: 5000 + }) + + static readonly PEERS_CHECK_INTERVAL: LighthouseConfig = new LighthouseConfig({ + name: 'peersCheckInterval', + fromText: parseInt, + defaultValue: 60000 + }) + + static readonly PEER_PARAMETERS: LighthouseConfig = new LighthouseConfig({ + name: 'peerParameters', + fromText: JSON.parse, + defaultValue: {} // By default, we don't send additional parameters to peers. They use their default configuration + }) + + readonly name: string + readonly fromText: (config: string) => T + readonly defaultValue: T + + constructor({ name, fromText, defaultValue }: { name: string; fromText: (config: string) => T; defaultValue: T }) { + this.name = name + this.fromText = fromText + this.defaultValue = defaultValue + } + + static toEnvironmentName(name: string): string { + return name.replace(/[A-Z]/g, (letter) => `_${letter}`).toUpperCase() + } +} + +/** + * This service handles the lighthouse's configuration. There are four different levels of configuration, with the following precedence: + * Environment config >> storage config >> global config >> default config + */ +export class ConfigService { + private readonly listeners: Map void)[]> = new Map() + private readonly config: Config = {} + + constructor( + private readonly storage: ISimpleStorage, + private readonly fetchGlobalConfig: () => Promise, + private readonly envWrapper: EnvironmentWrapper + ) { + setInterval(() => this.updateConfig(), ms('15m')) + } + + async updateStorageConfigs(configs: ConfigKeyValue[]) { + for (const it of configs) { + if (typeof it.value !== 'undefined') { + await this.storage.setString(it.key, JSON.stringify(it.value)) + } else { + await this.storage.deleteKey(it.key) + } + } + + await this.updateConfig() + return this.getAllConfig() + } + + listenTo(config: LighthouseConfig, listener: (newValue: T) => void): void { + const listeners = this.listeners.get(config.name) + if (listeners) { + listeners.push(listener) + } else { + this.listeners.set(config.name, [listener]) + } + } + + getAllConfig() { + return this.config + } + + get(config: LighthouseConfig): T { + return this.config[config.name] + } + + /** + * This method reads storage and global config, and then updates the current on-memory registry. 
+ * + * Note: visible for testing purposes + */ + async updateConfig() { + const storageConfig = await this.storage.getAll() + const globalConfig = await this.fetchGlobalConfig() + for (const [, config] of Object.entries(LighthouseConfig)) { + const configName = config.name + const configEnvironmentName = LighthouseConfig.toEnvironmentName(config.name) + const currentConfigValue = this.config[config.name] + let newConfigValue: any + if (this.envWrapper.isInEnv(configEnvironmentName)) { + newConfigValue = config.fromText(this.envWrapper.readFromEnv(configEnvironmentName)) + } else if (configName in storageConfig) { + newConfigValue = JSON.parse(storageConfig[configName]) + } else if (configName in globalConfig) { + newConfigValue = globalConfig[configName] + } else { + newConfigValue = config.defaultValue + } + if (currentConfigValue !== newConfigValue) { + this.config[configName] = newConfigValue + this.listeners.get(configName)?.forEach((listener) => listener(newConfigValue)) + } + } + } + + static async build(options: { + storage: ISimpleStorage + globalConfig: { ethNetwork: string } | { fetch: () => Promise } + envWrapper?: EnvironmentWrapper + }): Promise { + const globalConfig = options.globalConfig + const service = new ConfigService( + options.storage, + 'ethNetwork' in globalConfig ? () => fetchGlobalConfig(globalConfig.ethNetwork) : globalConfig.fetch, + options.envWrapper ?? buildEnvWrapper() + ) + await service.updateConfig() + return service + } +} + +export type Config = Record + +export type EnvironmentWrapper = { + isInEnv: (environmentKey: string) => boolean + readFromEnv: (environmentKey: string) => string +} + +function buildEnvWrapper(): EnvironmentWrapper { + return { + isInEnv: (environmentKey: string) => environmentKey in process.env, + readFromEnv: (environmentKey: string) => process.env[environmentKey]! + } +} + +async function fetchGlobalConfig(ethNetwork: string): Promise { + try { + const tld = ethNetwork === 'mainnet' ? 
'org' : 'zone' + return (await fetchJson(`https://config.decentraland.${tld}/catalyst.json`)) as Config + } catch { + return {} + } +} diff --git a/comms/lighthouse/src/default_layers.ts b/comms/lighthouse/src/config/default_layers.ts similarity index 100% rename from comms/lighthouse/src/default_layers.ts rename to comms/lighthouse/src/config/default_layers.ts diff --git a/comms/lighthouse/src/simpleStorage.ts b/comms/lighthouse/src/config/simpleStorage.ts similarity index 89% rename from comms/lighthouse/src/simpleStorage.ts rename to comms/lighthouse/src/config/simpleStorage.ts index 64f4e4e18..baaaa0174 100644 --- a/comms/lighthouse/src/simpleStorage.ts +++ b/comms/lighthouse/src/config/simpleStorage.ts @@ -9,7 +9,16 @@ const deepCopy = (obj: any) => { return v8.deserialize(v8.serialize(obj)) } -export class SimpleStorage { +export interface ISimpleStorage { + clear(): Promise + getAll(): Promise + getString(key: string): Promise + getOrSetString(key: string, value: string): Promise + setString(key: string, value: string): Promise + deleteKey(key: string): Promise +} + +export class SimpleStorage implements ISimpleStorage { private _currentItems: object | undefined private _lastFlush: object | undefined diff --git a/comms/lighthouse/src/configService.ts b/comms/lighthouse/src/configService.ts deleted file mode 100644 index b36b2bb8b..000000000 --- a/comms/lighthouse/src/configService.ts +++ /dev/null @@ -1,41 +0,0 @@ -import { SimpleStorage } from './simpleStorage' - -export type ConfigKeyValue = { - key: string - value?: any -} - -export class ConfigService { - private storage: SimpleStorage - - constructor(storage: SimpleStorage) { - this.storage = storage - } - - async updateConfigs(configs: ConfigKeyValue[]) { - for (const it of configs) { - if (typeof it.value !== 'undefined') { - await this.storage.setString(it.key, JSON.stringify(it.value)) - } else { - await this.storage.deleteKey(it.key) - } - } - - return await this.getConfig() - } - - async getConfig() { - const items = await this.storage.getAll() - Object.keys(items).forEach((key) => (items[key] = JSON.parse(items[key]))) - return items - } - - async get(key: string, ifNotPresent: () => any): Promise { - const item = await this.storage.getString(key) - return typeof item !== 'undefined' ? JSON.parse(item) : ifNotPresent() - } - - async getMaxPeersPerLayer(): Promise { - return await this.get('maxPeersPerLayer', () => parseInt(process.env.MAX_PER_LAYER ?? 
'100')) - } -} diff --git a/comms/lighthouse/src/layersService.ts b/comms/lighthouse/src/layersService.ts deleted file mode 100644 index 142f48971..000000000 --- a/comms/lighthouse/src/layersService.ts +++ /dev/null @@ -1,227 +0,0 @@ -import { Gauge } from 'prom-client' -import { ConfigService } from './configService' -import { LayerIsFullError, RequestError, UserMustBeInLayerError as PeerMustBeInLayerError } from './errors' -import { NotificationType, PeersService } from './peersService' -import { RoomsService } from './roomsService' -import { Layer, PeerInfo, PeerRequest } from './types' -import { getPeerId, removePeerAndNotify } from './utils' - -const ACTIVE_USERS_GAUGE = new Gauge({ - name: 'active_peers_count', - help: 'Number of active users connected to each layer', - labelNames: ['layer'] -}) - -type LayersServiceConfig = { - peersService: PeersService - existingLayers?: string[] - allowNewLayers?: boolean - layerCheckInterval?: number //In seconds - configService: ConfigService -} - -export class LayersService { - private layers: Record = {} - - private layerChecker: LayerChecker = new LayerChecker(this, this.peersService) - - private newLayer(layerId: string): Layer { - return { id: layerId, peers: [], rooms: {}, lastCheckTimestamp: Date.now() } - } - - constructor(private config: LayersServiceConfig) { - if (this.config.existingLayers) { - this.config.existingLayers.forEach((layerId) => this.createLayer(layerId)) - } - } - - get peersService() { - return this.config.peersService - } - - getLayerIds(): string[] { - return Object.keys(this.layers) - } - - getLayers(): Layer[] { - return Object.values(this.layers) - } - - getLayer(layerId: string): Layer | undefined { - return this.layers[layerId] - } - - getLayerPeers(layerId: string): PeerInfo[] { - const layer = this.layers[layerId] - - if (layer) { - this.checkLayerPeersIfNeeded(layer) - return this.peersService.getPeersInfo(layer.peers) - } else { - throw new RequestError('Layer not found', 'layer-not-found', 404) - } - } - - getRoomsService(layerId: string) { - if (!this.exists(layerId)) { - return undefined - } else { - return new RoomsService(layerId, this.layers[layerId].rooms, { ...this.config }) - } - } - - exists(layerId: string) { - return this.layers.hasOwnProperty(layerId) - } - - private removePeerFromOtherLayers(layerId: string, peerId: string) { - Object.keys(this.layers).forEach((otherLayerId) => { - if (otherLayerId !== layerId && this.isPeerInLayer(otherLayerId, peerId)) { - this.removePeerFromLayer(otherLayerId, peerId) - } - }) - } - - removePeerFromLayer(layerId: string, peerId: string) { - this.getRoomsService(layerId)?.removePeer(peerId) - const { container, removed } = removePeerAndNotify( - this.layers, - layerId, - peerId, - NotificationType.PEER_LEFT_LAYER, - 'layerId', - this.peersService, - !this.isDefaultLayer(layerId) - ) - if (removed) { - ACTIVE_USERS_GAUGE.dec({ layer: layerId }) - } - return container - } - - createLayer(layerId: string) { - return (this.layers[layerId] = this.newLayer(layerId)) - } - - async setPeerLayer(layerId: string, peer: PeerRequest) { - let layer = this.layers[layerId] - - if (!layer) { - layer = this.createLayer(layerId) - } - - const peerId = getPeerId(peer) - - if (!this.isPeerInLayer(layerId, peerId)) { - const peerInfo = this.peersService.ensurePeerInfo(peer) - - this.checkLayerPeersIfNeeded(layer) - - const maxPeers = await this.getMaxPeersFor(layer) - - if (maxPeers && layer.peers.length >= maxPeers) { - throw new LayerIsFullError(layer, peerId) - } - - 
this.removePeerFromOtherLayers(layerId, peerId) - - peerInfo.layer = layerId - - const peersToNotify = layer.peers.slice() - layer.peers.push(peerId) - this.peersService.notifyPeersById(peersToNotify, NotificationType.PEER_JOINED_LAYER, { - id: peerId, - userId: peerId, - peerId, - layerId - }) - ACTIVE_USERS_GAUGE.inc({ layer: layerId }) - } - - return layer - } - - async getMaxPeersFor(layer: Layer) { - return layer.maxPeers ?? (await this.config.configService.getMaxPeersPerLayer()) - } - - checkLayerPeersIfNeeded(layer: Layer) { - if (Date.now() - layer.lastCheckTimestamp > this.getLayerCheckInterval() * 1000) { - layer.lastCheckTimestamp = Date.now() - this.layerChecker.checkLayer(layer) - } - } - - private getLayerCheckInterval() { - return this.config.layerCheckInterval ?? 180 - } - - private isPeerInLayer(layerId: string, peerId: string) { - return this.layers[layerId].peers.includes(peerId) - } - - private isDefaultLayer(layerId: string) { - return this.config.existingLayers?.includes(layerId) - } - - async addPeerToRoom(layerId: string, roomId: string, peer: PeerRequest) { - const peerId = getPeerId(peer) - if (!this.isPeerInLayer(layerId, peerId)) { - throw new PeerMustBeInLayerError(layerId, peerId) - } - - return await this.getRoomsService(layerId)!.addPeerToRoom(roomId, peerId) - } - - removePeer(peerId: string) { - Object.keys(this.layers).forEach((layerId) => { - this.removePeerFromLayer(layerId, peerId) - }) - } - - getLayerTopology(layerId: string) { - return this.layers[layerId].peers.map((it) => ({ - ...this.peersService.getPeerInfo(it), - connectedPeerIds: this.peersService.getConnectedPeers(it) - })) - } - - getOptimalConnectionsFor(peerId: string, targetConnections: number, maxDistance: number) { - const peerInfo = this.peersService.getPeerInfo(peerId) - if (peerInfo.layer && peerInfo.position) { - return { - layerId: peerInfo.layer, - optimalConnections: this.peersService.getOptimalConnectionsFor( - peerInfo, - this.getLayerPeers(peerInfo.layer), - targetConnections, - maxDistance - ) - } - } - } -} - -class LayerChecker { - private layersBeingChecked: Set = new Set() - - constructor(private layersService: LayersService, private peersService?: PeersService) {} - - checkLayer(layer: Layer) { - if (!this.layersBeingChecked.has(layer.id)) { - this.layersBeingChecked.add(layer.id) - - //We execute the check as a background task to avoid impacting a request, even though this should be pretty quick - setTimeout(() => { - layer.peers.slice().forEach((it) => { - if (this.peersService && !this.peersService.peerExistsInRealm(it)) { - console.log(`Removing peer ${it} from layer ${layer.id} because it is not connected to Peer Network`) - this.layersService.removePeerFromLayer(layer.id, it) - } - }) - - this.layersBeingChecked.delete(layer.id) - }, 0) - } - } -} diff --git a/comms/lighthouse/src/metrics.ts b/comms/lighthouse/src/metrics.ts new file mode 100644 index 000000000..aa408bcf8 --- /dev/null +++ b/comms/lighthouse/src/metrics.ts @@ -0,0 +1,16 @@ +import { createTestMetricsComponent, validateMetricsDeclaration } from '@well-known-components/metrics' +import { getDefaultHttpMetrics } from '@well-known-components/metrics/dist/http' + +export const metrics = validateMetricsDeclaration({ + ...getDefaultHttpMetrics(), + dcl_lighthouse_connected_peers_count: { + help: 'Number of connected peers', + type: 'gauge' + }, + dcl_lighthouse_islands_count: { + help: 'Number of alive islands', + type: 'gauge' + } +}) + +export const metricsComponent = createTestMetricsComponent(metrics) 
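For context (not part of the patch itself): the two gauges declared in the new `metrics.ts` above are driven by `observe` calls that appear later in this diff (in `realm.ts` and `archipelagoService.ts`). Below is a minimal sketch of that usage, assuming only the `metricsComponent.observe(name, labels, value)` call the patch already relies on; the helper functions are illustrative and not part of the codebase:

```ts
// Sketch only: mirrors how this patch updates the lighthouse gauges elsewhere.
import { metricsComponent } from './metrics'

// A gauge holds the latest observed value, so each call overwrites the previous one.
function reportConnectedPeers(count: number): void {
  metricsComponent.observe('dcl_lighthouse_connected_peers_count', {}, count)
}

function reportIslandsCount(count: number): void {
  metricsComponent.observe('dcl_lighthouse_islands_count', {}, count)
}

reportConnectedPeers(42)
reportIslandsCount(3)
```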
diff --git a/comms/lighthouse/src/errors.ts b/comms/lighthouse/src/misc/errors.ts similarity index 94% rename from comms/lighthouse/src/errors.ts rename to comms/lighthouse/src/misc/errors.ts index da8288900..347a04320 100644 --- a/comms/lighthouse/src/errors.ts +++ b/comms/lighthouse/src/misc/errors.ts @@ -1,4 +1,4 @@ -import { Layer } from './types' +import { Layer } from '../types' export class RequestError extends Error { constructor(message: string, public statusMessage?: string, public status: number = 400) { diff --git a/comms/lighthouse/src/misc/graphvizTopology.ts b/comms/lighthouse/src/misc/graphvizTopology.ts new file mode 100644 index 000000000..89f439129 --- /dev/null +++ b/comms/lighthouse/src/misc/graphvizTopology.ts @@ -0,0 +1,16 @@ +import { PeerTopologyInfo } from '../types' + +export function toGraphviz(topology: PeerTopologyInfo[]) { + return ` + strict digraph graphName { + concentrate=true + ${topology.map((it) => `"${it.id}"[label="${it.id}\\nconns:${it.connectedPeers?.length ?? 0}"];`).join('\n')} + ${topology + .map((it) => + it.connectedPeers?.length + ? it.connectedPeers.map((connected) => `"${it.id}"->"${connected}";`).join('\n') + : `"${it.id}";` + ) + .join('\n')} + }` +} diff --git a/comms/lighthouse/src/handlers.ts b/comms/lighthouse/src/misc/handlers.ts similarity index 96% rename from comms/lighthouse/src/handlers.ts rename to comms/lighthouse/src/misc/handlers.ts index 42113d688..b16e070b6 100644 --- a/comms/lighthouse/src/handlers.ts +++ b/comms/lighthouse/src/misc/handlers.ts @@ -1,6 +1,6 @@ /* eslint-disable @typescript-eslint/ban-types */ -import { NextFunction, Request, RequestHandler, Response } from 'express-serve-static-core' -import { IRealm } from 'peerjs-server' +import { NextFunction, Request, RequestHandler, Response } from 'express' +import { IRealm } from '../peerjs-server' enum PeerHeaders { PeerToken = 'X-Peer-Token' diff --git a/comms/lighthouse/src/logging.ts b/comms/lighthouse/src/misc/logging.ts similarity index 100% rename from comms/lighthouse/src/logging.ts rename to comms/lighthouse/src/misc/logging.ts diff --git a/comms/lighthouse/src/naming.ts b/comms/lighthouse/src/misc/naming.ts similarity index 82% rename from comms/lighthouse/src/naming.ts rename to comms/lighthouse/src/misc/naming.ts index f85ca4e74..493f3826e 100644 --- a/comms/lighthouse/src/naming.ts +++ b/comms/lighthouse/src/misc/naming.ts @@ -1,7 +1,5 @@ -import { DAOClient } from 'decentraland-katalyst-commons/DAOClient' -import { ServerMetadata } from 'decentraland-katalyst-commons/ServerMetadata' -import { noReject } from 'decentraland-katalyst-utils/util' -import { lighthouseStorage } from './simpleStorage' +import { DAOClient, noReject, ServerMetadata } from '@catalyst/commons' +import { lighthouseStorage } from '../config/simpleStorage' export const defaultNames = [ 'zeus', @@ -58,7 +56,7 @@ async function getLighthousesNames(daoClient: DAOClient) { async function getName(server: ServerMetadata): Promise { //Timeout is an option that is supported server side, but not browser side, so it doesn't compile if we don't cast it to any try { - const statusResponse = await fetch(`${server.address}/comms/status`, { timeout: 5000 } as any) + const statusResponse = await fetch(`${server.baseUrl}/comms/status`, { timeout: 5000 } as any) const json = await statusResponse.json() if (json.name) { @@ -67,7 +65,7 @@ async function getName(server: ServerMetadata): Promise { throw new Error(`Response did not have the expected format. 
Response was: ${JSON.stringify(json)}`) } catch (e) { - console.warn(`Error while getting the name of ${server.address}, id: ${server.id}`, e.message) + console.warn(`Error while getting the name of ${server.baseUrl}, id: ${server.id}`, e.message) throw e } } diff --git a/comms/lighthouse/src/misc/utils.ts b/comms/lighthouse/src/misc/utils.ts new file mode 100644 index 000000000..0c4830c11 --- /dev/null +++ b/comms/lighthouse/src/misc/utils.ts @@ -0,0 +1,5 @@ +import { PeerRequest } from '../types' + +export function getPeerId(peer: PeerRequest): string { + return (peer.id ?? peer.peerId)! +} diff --git a/comms/lighthouse/src/peerjs-server/README.md b/comms/lighthouse/src/peerjs-server/README.md new file mode 100644 index 000000000..d529728b9 --- /dev/null +++ b/comms/lighthouse/src/peerjs-server/README.md @@ -0,0 +1,5 @@ +# Catalyst PeerJS Server Implementation + +Based upon Peer JS Server: https://github.com/peers/peerjs-server + +Originally forked in here: https://github.com/decentraland/peerjs-server diff --git a/comms/lighthouse/src/peerjs-server/config/index.ts b/comms/lighthouse/src/peerjs-server/config/index.ts new file mode 100644 index 000000000..9c40159f9 --- /dev/null +++ b/comms/lighthouse/src/peerjs-server/config/index.ts @@ -0,0 +1,43 @@ +import { IClient } from '../models/client' +import { IMessage } from '../models/message' +import { numericIdGenerator } from '../utils/idgenerator' + +export interface IConfig { + readonly port: number + readonly expire_timeout: number + readonly alive_timeout: number + readonly key: string + readonly path: string + readonly concurrent_limit: number + readonly proxied: boolean | string + readonly cleanup_out_msgs: number + readonly ssl?: { + key: string + cert: string + } + readonly authHandler: (client: IClient | undefined, message: IMessage) => Promise + readonly idGenerator: () => string + readonly transmissionFilter: (src: string, dst: string, message: IMessage) => Promise + readonly maxIdIterations: number +} + +const defaultConfig: IConfig = { + port: 9000, + expire_timeout: 5000, + alive_timeout: 60000, + key: 'peerjs', + path: '/myapp', + concurrent_limit: 5000, + proxied: false, + cleanup_out_msgs: 1000, + ssl: { + key: '', + cert: '' + }, + authHandler: () => Promise.resolve(true), + idGenerator: numericIdGenerator(), + maxIdIterations: 100000, + transmissionFilter: () => Promise.resolve(true) +} + +export default defaultConfig diff --git a/comms/lighthouse/src/peerjs-server/enums.ts b/comms/lighthouse/src/peerjs-server/enums.ts new file mode 100644 index 000000000..322cf249f --- /dev/null +++ b/comms/lighthouse/src/peerjs-server/enums.ts @@ -0,0 +1,29 @@ +export enum Errors { + INVALID_KEY = 'Invalid key provided', + INVALID_TOKEN = 'Invalid token provided', + INVALID_WS_PARAMETERS = 'No token, or key supplied to websocket server', + CONNECTION_LIMIT_EXCEED = 'Server has reached its concurrent user limit', + NO_AVAILABLE_ID_FOUND = 'No available id has been found' +} + +export enum MessageType { + OPEN = 'OPEN', + LEAVE = 'LEAVE', + CANDIDATE = 'CANDIDATE', + OFFER = 'OFFER', + ANSWER = 'ANSWER', + REJECT = 'REJECT', + EXPIRE = 'EXPIRE', + HEARTBEAT = 'HEARTBEAT', + ID_TAKEN = 'ID-TAKEN', + ERROR = 'ERROR', + VALIDATION = 'VALIDATION', + VALIDATION_OK = 'VALIDATION_OK', + VALIDATION_NOK = 'VALIDATION_NOK', + ASSIGNED_ID = 'ASSIGNED_ID' +} + +export enum IdType { + SELF_ASSIGNED = 'SELF_ASSIGNED', + SERVER_ASSIGNED = 'SERVER_ASSIGNED' +} diff --git a/comms/lighthouse/src/peerjs-server/index.ts 
b/comms/lighthouse/src/peerjs-server/index.ts new file mode 100644 index 000000000..2ea7b71d6 --- /dev/null +++ b/comms/lighthouse/src/peerjs-server/index.ts @@ -0,0 +1,72 @@ +import express, { Express } from 'express' +import http from 'http' +import https from 'https' +import { Server } from 'net' +import defaultConfig, { IConfig } from './config' +import { createInstance } from './instance' +import { IRealm } from './models/realm' + +type Optional = { + [P in keyof T]?: T[P] | undefined +} + +function ExpressPeerServer(server: Server, options?: Optional): Express { + const app = express() + + const newOptions: IConfig = { + ...defaultConfig, + ...options + } + + if (newOptions.proxied) { + app.set('trust proxy', newOptions.proxied === 'false' ? false : !!newOptions.proxied) + } + + app.on('mount', () => { + if (!server) { + throw new Error('Server is not passed to constructor - ' + "can't start PeerServer") + } + + createInstance({ app, server, options: newOptions }) + }) + + return app +} + +function PeerServer(options: Optional = {}, callback?: (server: Server) => void): Express { + const app = express() + + const newOptions: IConfig = { + ...defaultConfig, + ...options + } + + let path = newOptions.path + const port = newOptions.port + + if (!path.startsWith('/')) { + path = '/' + path + } + + if (!path.endsWith('/')) { + path += '/' + } + + let server: Server + + if (newOptions.ssl && newOptions.ssl.key && newOptions.ssl.cert) { + server = https.createServer(options.ssl!, app) + delete (newOptions as any).ssl + } else { + server = http.createServer(app) + } + + const peerjs = ExpressPeerServer(server, newOptions) + app.use(peerjs) + + server.listen(port, () => callback?.(server)) + + return peerjs +} + +export { ExpressPeerServer, PeerServer, IRealm } diff --git a/comms/lighthouse/src/peerjs-server/instance.ts b/comms/lighthouse/src/peerjs-server/instance.ts new file mode 100644 index 000000000..972415e86 --- /dev/null +++ b/comms/lighthouse/src/peerjs-server/instance.ts @@ -0,0 +1,81 @@ +import express from 'express' +import { Server } from 'net' +import { IConfig } from './config' +import { MessageHandler } from './messageHandler' +import { IClient } from './models/client' +import { IMessage } from './models/message' +import { IRealm, Realm } from './models/realm' +import { CheckBrokenConnections } from './services/checkBrokenConnections' +import { IMessagesExpire, MessagesExpire } from './services/messagesExpire' +import { IWebSocketServer, WebSocketServer } from './services/webSocketServer' + +export const createInstance = ({ + app, + server, + options +}: { + app: express.Application + server: Server + options: IConfig +}): void => { + const config = options + const realm: IRealm = new Realm() + + app.set('peerjs-realm', realm) + + const messageHandler = new MessageHandler(realm, config) + + const messagesExpire: IMessagesExpire = new MessagesExpire({ realm, config, messageHandler }) + const checkBrokenConnections = new CheckBrokenConnections({ + realm, + config, + onClose: (client) => { + app.emit('disconnect', client) + } + }) + + const wss: IWebSocketServer = new WebSocketServer({ + server, + realm, + config + }) + + function handleError(runnable: () => Promise) { + runnable().catch((e) => wss.emit('error', e)) + } + + wss.on('connection', (client: IClient) => + handleError(async () => { + const messageQueue = realm.getMessageQueueById(client.getId()) + + if (messageQueue) { + let message: IMessage | undefined + + while ((message = messageQueue.readMessage())) { + await 
messageHandler.handle(client, message) + } + realm.clearMessageQueue(client.getId()) + } + + app.emit('connection', client) + }) + ) + + wss.on('message', (client: IClient, message: IMessage) => + handleError(async () => { + app.emit('message', client, message) + await messageHandler.handle(client, message) + }) + ) + + wss.on('close', (client: IClient) => { + app.emit('disconnect', client) + }) + + wss.on('error', (error: Error) => { + app.emit('error', error) + }) + + messagesExpire.startMessagesExpiration() + checkBrokenConnections.start() +} diff --git a/comms/lighthouse/src/peerjs-server/messageHandler/handler.ts b/comms/lighthouse/src/peerjs-server/messageHandler/handler.ts new file mode 100644 index 000000000..dc8dc8df6 --- /dev/null +++ b/comms/lighthouse/src/peerjs-server/messageHandler/handler.ts @@ -0,0 +1,4 @@ +import { IClient } from '../models/client' +import { IMessage } from '../models/message' + +export type Handler = (client: IClient | undefined, message: IMessage) => Promise diff --git a/comms/lighthouse/src/peerjs-server/messageHandler/handlers/heartbeat/index.ts b/comms/lighthouse/src/peerjs-server/messageHandler/handlers/heartbeat/index.ts new file mode 100644 index 000000000..c4dc66441 --- /dev/null +++ b/comms/lighthouse/src/peerjs-server/messageHandler/handlers/heartbeat/index.ts @@ -0,0 +1,10 @@ +import { IClient } from '../../../models/client' + +export const HeartbeatHandler = async (client: IClient | undefined): Promise => { + if (client) { + const nowTime = new Date().getTime() + client.setLastPing(nowTime) + } + + return true +} diff --git a/comms/lighthouse/src/peerjs-server/messageHandler/handlers/index.ts b/comms/lighthouse/src/peerjs-server/messageHandler/handlers/index.ts new file mode 100644 index 000000000..109573687 --- /dev/null +++ b/comms/lighthouse/src/peerjs-server/messageHandler/handlers/index.ts @@ -0,0 +1,2 @@ +export { HeartbeatHandler } from './heartbeat' +export { TransmissionHandler } from './transmission' diff --git a/comms/lighthouse/src/peerjs-server/messageHandler/handlers/transmission/index.ts b/comms/lighthouse/src/peerjs-server/messageHandler/handlers/transmission/index.ts new file mode 100644 index 000000000..684fe302b --- /dev/null +++ b/comms/lighthouse/src/peerjs-server/messageHandler/handlers/transmission/index.ts @@ -0,0 +1,77 @@ +import { MessageType } from '../../../enums' +import { IClient } from '../../../models/client' +import { IMessage } from '../../../models/message' +import { IRealm } from '../../../models/realm' + +export const TransmissionHandler = ({ + realm, + transmissionFilter +}: { + realm: IRealm + transmissionFilter?: (src: string, dst: string, message: IMessage) => Promise +}): ((client: IClient | undefined, message: IMessage) => Promise) => { + const handle = async (client: IClient | undefined, message: IMessage) => { + if (!client?.isAuthenticated()) { + // We ignore transmission messages for peers that are not authenticated + return true + } + + const type = message.type + const srcId = message.src + const dstId = message.dst + + if (transmissionFilter && !(await transmissionFilter(srcId, dstId, message))) { + // We ignore transmission messages that are filtered + return true + } + + const destinationClient = realm.getClientById(dstId) + + // User is connected! + if (destinationClient) { + const socket = destinationClient.getSocket() + try { + if (socket) { + const data = JSON.stringify(message) + + socket.send(data) + } else { + // Neither socket no res available. Peer dead? 
+ throw new Error('Peer dead') + } + } catch (e) { + // This happens when a peer disconnects without closing connections and + // the associated WebSocket has not closed. + // Tell other side to stop trying. + if (socket) { + socket.close() + } else { + realm.removeClientById(destinationClient.getId()) + } + + await handle(client, { + type: MessageType.LEAVE, + src: dstId, + dst: srcId + }) + } + } else { + // Wait for this client to connect/reconnect (XHR) for important + // messages. + const ignoredTypes = [MessageType.LEAVE, MessageType.EXPIRE] + + if (!ignoredTypes.includes(type) && dstId) { + realm.addMessageToQueue(dstId, message) + } else if (type === MessageType.LEAVE && !dstId) { + realm.removeClientById(srcId) + } else { + // Unavailable destination specified with message LEAVE or EXPIRE + // Ignore + } + } + + return true + } + + return handle +} diff --git a/comms/lighthouse/src/peerjs-server/messageHandler/handlersRegistry.ts b/comms/lighthouse/src/peerjs-server/messageHandler/handlersRegistry.ts new file mode 100644 index 000000000..1dede263b --- /dev/null +++ b/comms/lighthouse/src/peerjs-server/messageHandler/handlersRegistry.ts @@ -0,0 +1,29 @@ +import { MessageType } from '../enums' +import { IClient } from '../models/client' +import { IMessage } from '../models/message' +import { Handler } from './handler' + +export interface IHandlersRegistry { + registerHandler(messageType: MessageType, handler: Handler): void + handle(client: IClient | undefined, message: IMessage): Promise +} + +export class HandlersRegistry implements IHandlersRegistry { + private readonly handlers: Map = new Map() + + public registerHandler(messageType: MessageType, handler: Handler): void { + if (this.handlers.has(messageType)) return + + this.handlers.set(messageType, handler) + } + + public async handle(client: IClient | undefined, message: IMessage): Promise { + const { type } = message + + const handler = this.handlers.get(type) + + if (!handler) return false + + return handler(client, message) + } +} diff --git a/comms/lighthouse/src/peerjs-server/messageHandler/index.ts b/comms/lighthouse/src/peerjs-server/messageHandler/index.ts new file mode 100644 index 000000000..2b1da41cf --- /dev/null +++ b/comms/lighthouse/src/peerjs-server/messageHandler/index.ts @@ -0,0 +1,88 @@ +import { IConfig } from '../config/index' +import { MessageType } from '../enums' +import { IClient } from '../models/client' +import { IMessage } from '../models/message' +import { IRealm } from '../models/realm' +import { Handler } from './handler' +import { HeartbeatHandler, TransmissionHandler } from './handlers' +import { HandlersRegistry, IHandlersRegistry } from './handlersRegistry' + +export interface IMessageHandler { + handle(client: IClient | undefined, message: IMessage): boolean | Promise +} + +export class MessageHandler implements IMessageHandler { + constructor( + realm: IRealm, + config: IConfig, + private readonly handlersRegistry: IHandlersRegistry = new HandlersRegistry() + ) { + const transmissionHandler: Handler = TransmissionHandler({ realm, transmissionFilter: config.transmissionFilter }) + const heartbeatHandler: Handler = HeartbeatHandler + + const handleTransmission: Handler = (client: IClient | undefined, { type, src, dst, payload }: IMessage) => { + return transmissionHandler(client, { + type, + src, + dst, + payload + }) + } + + const handleHeartbeat = (client: IClient | undefined, message: IMessage) => heartbeatHandler(client, message) + + const handleValidation = async (client: IClient | 
undefined, message: IMessage) => { + const result = await config.authHandler(client, message) + const socket = client?.getSocket() + + try { + if (socket) { + if (result) { + client!.setAuthenticated(true) + } + + const data = JSON.stringify({ type: result ? MessageType.VALIDATION_OK : MessageType.VALIDATION_NOK }) + + socket.send(data) + + if (!result) { + socket.close() + } + } else { + // Neither socket no res available. Peer dead? + throw new Error('Peer dead') + } + } catch (e) { + // This happens when a peer disconnects without closing connections and + // the associated WebSocket has not closed. + // Tell other side to stop trying. + if (socket) { + socket.close() + } else { + realm.removeClientById(client!.getId()) + } + + await this.handle(client, { + type: MessageType.LEAVE, + src: client!.getId(), + dst: client!.getId() + }) + } + + return true + } + + this.handlersRegistry.registerHandler(MessageType.HEARTBEAT, handleHeartbeat) + this.handlersRegistry.registerHandler(MessageType.VALIDATION, handleValidation) + this.handlersRegistry.registerHandler(MessageType.OFFER, handleTransmission) + this.handlersRegistry.registerHandler(MessageType.ANSWER, handleTransmission) + this.handlersRegistry.registerHandler(MessageType.REJECT, handleTransmission) + this.handlersRegistry.registerHandler(MessageType.CANDIDATE, handleTransmission) + this.handlersRegistry.registerHandler(MessageType.LEAVE, handleTransmission) + this.handlersRegistry.registerHandler(MessageType.EXPIRE, handleTransmission) + } + + public handle(client: IClient | undefined, message: IMessage): Promise { + return this.handlersRegistry.handle(client, message) + } +} diff --git a/comms/lighthouse/src/peerjs-server/models/client.ts b/comms/lighthouse/src/peerjs-server/models/client.ts new file mode 100644 index 000000000..772f10f04 --- /dev/null +++ b/comms/lighthouse/src/peerjs-server/models/client.ts @@ -0,0 +1,87 @@ +import { IdType } from '../enums' +import { MyWebSocket } from '../services/webSocketServer/webSocket' + +export interface IClient { + getId(): string + + getIdType(): IdType + + getToken(): string + + getMsg(): string + + getSocket(): MyWebSocket | null + + setSocket(socket: MyWebSocket | null): void + + getLastPing(): number + + setLastPing(lastPing: number): void + + send(data: any): void + + isAuthenticated(): boolean + + setAuthenticated(authenticated: boolean): void +} + +export class Client implements IClient { + private readonly id: string + private readonly token: string + private readonly msg: string + private socket: MyWebSocket | null = null + private lastPing: number = new Date().getTime() + private authenticated: boolean = false + private idType: IdType + + constructor({ id, token, msg, idType }: { id: string; token: string; msg: string; idType: IdType }) { + this.id = id + this.token = token + this.msg = msg + this.idType = idType + } + + public getId(): string { + return this.id + } + + public getIdType(): IdType { + return this.idType + } + + public getToken(): string { + return this.token + } + + public getMsg(): string { + return this.msg + } + + public getSocket(): MyWebSocket | null { + return this.socket + } + + public setSocket(socket: MyWebSocket | null): void { + this.socket = socket + } + + public getLastPing(): number { + return this.lastPing + } + + public setLastPing(lastPing: number): void { + this.lastPing = lastPing + } + + public send(data: any): void { + this.socket?.send(JSON.stringify(data)) + } + + public isAuthenticated(): boolean { + return this.authenticated + } + + 
public setAuthenticated(authenticated: boolean): void { + this.authenticated = authenticated + } +} diff --git a/comms/lighthouse/src/peerjs-server/models/message.ts b/comms/lighthouse/src/peerjs-server/models/message.ts new file mode 100644 index 000000000..83c176bf2 --- /dev/null +++ b/comms/lighthouse/src/peerjs-server/models/message.ts @@ -0,0 +1,8 @@ +import { MessageType } from '../enums' + +export interface IMessage { + readonly type: MessageType + readonly src: string + readonly dst: string + readonly payload?: any +} diff --git a/comms/lighthouse/src/peerjs-server/models/messageQueue.ts b/comms/lighthouse/src/peerjs-server/models/messageQueue.ts new file mode 100644 index 000000000..b8559f520 --- /dev/null +++ b/comms/lighthouse/src/peerjs-server/models/messageQueue.ts @@ -0,0 +1,37 @@ +import { IMessage } from './message' + +export interface IMessageQueue { + getLastReadAt(): number + + addMessage(message: IMessage): void + + readMessage(): IMessage | undefined + + getMessages(): IMessage[] +} + +export class MessageQueue implements IMessageQueue { + private lastReadAt: number = new Date().getTime() + private readonly messages: IMessage[] = [] + + public getLastReadAt(): number { + return this.lastReadAt + } + + public addMessage(message: IMessage): void { + this.messages.push(message) + } + + public readMessage(): IMessage | undefined { + if (this.messages.length > 0) { + this.lastReadAt = new Date().getTime() + return this.messages.shift()! + } + + return undefined + } + + public getMessages(): IMessage[] { + return this.messages + } +} diff --git a/comms/lighthouse/src/peerjs-server/models/realm.ts b/comms/lighthouse/src/peerjs-server/models/realm.ts new file mode 100644 index 000000000..4ff5fdf74 --- /dev/null +++ b/comms/lighthouse/src/peerjs-server/models/realm.ts @@ -0,0 +1,96 @@ +import { v4 as uuidv4 } from 'uuid' +import { metricsComponent } from '../../metrics' +import { IClient } from './client' +import { IMessage } from './message' +import { IMessageQueue, MessageQueue } from './messageQueue' + +export interface IRealm { + getClientsIds(): string[] + + hasClient(id: string): boolean + + getClientById(clientId: string): IClient | undefined + + getClientsIdsWithQueue(): string[] + + setClient(client: IClient, id: string): void + + removeClientById(id: string): boolean + + getMessageQueueById(id: string): IMessageQueue | undefined + + addMessageToQueue(id: string, message: IMessage): void + + clearMessageQueue(id: string): void + + generateClientId(): string + + getClientsCount(): number +} + +export class Realm implements IRealm { + private readonly clients: Map = new Map() + private readonly messageQueues: Map = new Map() + + public getClientsIds(): string[] { + return [...this.clients.keys()] + } + + public hasClient(id: string): boolean { + return this.clients.has(id) + } + + public getClientById(clientId: string): IClient | undefined { + return this.clients.get(clientId) + } + + public getClientsIdsWithQueue(): string[] { + return [...this.messageQueues.keys()] + } + + public setClient(client: IClient, id: string): void { + this.clients.set(id, client) + metricsComponent.observe('dcl_lighthouse_connected_peers_count', {}, this.clients.size) + } + + public removeClientById(id: string): boolean { + const client = this.getClientById(id) + + if (!client) return false + + this.clients.delete(id) + metricsComponent.observe('dcl_lighthouse_connected_peers_count', {}, this.clients.size) + + return true + } + + public getMessageQueueById(id: string): IMessageQueue | 
undefined { + return this.messageQueues.get(id) + } + + public addMessageToQueue(id: string, message: IMessage): void { + if (!this.getMessageQueueById(id)) { + this.messageQueues.set(id, new MessageQueue()) + } + + this.getMessageQueueById(id)!.addMessage(message) + } + + public clearMessageQueue(id: string): void { + this.messageQueues.delete(id) + } + + public generateClientId(): string { + let clientId = uuidv4() + + while (this.getClientById(clientId)) { + clientId = uuidv4() + } + + return clientId + } + + public getClientsCount(): number { + return this.clients.size + } +} diff --git a/comms/lighthouse/src/peerjs-server/services/checkBrokenConnections/index.ts b/comms/lighthouse/src/peerjs-server/services/checkBrokenConnections/index.ts new file mode 100644 index 000000000..28b27054c --- /dev/null +++ b/comms/lighthouse/src/peerjs-server/services/checkBrokenConnections/index.ts @@ -0,0 +1,78 @@ +import { IConfig } from '../../config' +import { IClient } from '../../models/client' +import { IRealm } from '../../models/realm' + +const DEFAULT_CHECK_INTERVAL = 300 + +type CustomConfig = Pick + +export class CheckBrokenConnections { + public readonly checkInterval: number + private timeoutId: NodeJS.Timeout | null = null + private readonly realm: IRealm + private readonly config: CustomConfig + private readonly onClose?: (client: IClient) => void + + constructor({ + realm, + config, + checkInterval = DEFAULT_CHECK_INTERVAL, + onClose + }: { + realm: IRealm + config: CustomConfig + checkInterval?: number + onClose?: (client: IClient) => void + }) { + this.realm = realm + this.config = config + this.onClose = onClose + this.checkInterval = checkInterval + } + + public start(): void { + if (this.timeoutId) { + clearTimeout(this.timeoutId) + } + + this.timeoutId = setTimeout(() => { + this.checkConnections() + + this.timeoutId = null + + this.start() + }, this.checkInterval) + } + + public stop(): void { + if (this.timeoutId) { + clearTimeout(this.timeoutId) + this.timeoutId = null + } + } + + private checkConnections(): void { + const clientsIds = this.realm.getClientsIds() + + const now = new Date().getTime() + const { alive_timeout: aliveTimeout } = this.config + + for (const clientId of clientsIds) { + const client = this.realm.getClientById(clientId)! 
+ const timeSinceLastPing = now - client.getLastPing() + + if (timeSinceLastPing < aliveTimeout) continue + + try { + client.getSocket()?.close() + } finally { + this.realm.clearMessageQueue(clientId) + this.realm.removeClientById(clientId) + + client.setSocket(null) + + this.onClose?.(client) + } + } + } +} diff --git a/comms/lighthouse/src/peerjs-server/services/messagesExpire/index.ts b/comms/lighthouse/src/peerjs-server/services/messagesExpire/index.ts new file mode 100644 index 000000000..57b024144 --- /dev/null +++ b/comms/lighthouse/src/peerjs-server/services/messagesExpire/index.ts @@ -0,0 +1,91 @@ +import { IConfig } from '../../config' +import { MessageType } from '../../enums' +import { IMessageHandler } from '../../messageHandler' +import { IRealm } from '../../models/realm' + +export interface IMessagesExpire { + startMessagesExpiration(): void + stopMessagesExpiration(): void +} + +type CustomConfig = Pick + +export class MessagesExpire implements IMessagesExpire { + private readonly realm: IRealm + private readonly config: CustomConfig + private readonly messageHandler: IMessageHandler + + private timeoutId: NodeJS.Timeout | null = null + + constructor({ + realm, + config, + messageHandler + }: { + realm: IRealm + config: CustomConfig + messageHandler: IMessageHandler + }) { + this.realm = realm + this.config = config + this.messageHandler = messageHandler + } + + public startMessagesExpiration(): void { + if (this.timeoutId) { + clearTimeout(this.timeoutId) + } + + // Clean up outstanding messages + this.timeoutId = setTimeout(async () => { + try { + await this.pruneOutstanding() + } catch (e) { + console.error('Error while pruning expired messages', e) + } + + this.timeoutId = null + + this.startMessagesExpiration() + }, this.config.cleanup_out_msgs) + } + + public stopMessagesExpiration(): void { + if (this.timeoutId) { + clearTimeout(this.timeoutId) + this.timeoutId = null + } + } + + private async pruneOutstanding() { + const destinationClientsIds = this.realm.getClientsIdsWithQueue() + + const now = new Date().getTime() + const maxDiff = this.config.expire_timeout + + const seen: Record = {} + + for (const destinationClientId of destinationClientsIds) { + const messageQueue = this.realm.getMessageQueueById(destinationClientId)! 
+ const lastReadDiff = now - messageQueue.getLastReadAt() + + if (lastReadDiff < maxDiff) continue + + const messages = messageQueue.getMessages() + + for (const message of messages) { + if (!seen[message.src]) { + await this.messageHandler.handle(undefined, { + type: MessageType.EXPIRE, + src: message.dst, + dst: message.src + }) + + seen[message.src] = true + } + } + + this.realm.clearMessageQueue(destinationClientId) + } + } +} diff --git a/comms/lighthouse/src/peerjs-server/services/webSocketServer/index.ts b/comms/lighthouse/src/peerjs-server/services/webSocketServer/index.ts new file mode 100644 index 000000000..d02fe5e08 --- /dev/null +++ b/comms/lighthouse/src/peerjs-server/services/webSocketServer/index.ts @@ -0,0 +1,187 @@ +import EventEmitter from 'events' +import { IncomingMessage } from 'http' +import url from 'url' +import WebSocketLib from 'ws' +import { IConfig } from '../../config' +import { Errors, IdType, MessageType } from '../../enums' +import { Client, IClient } from '../../models/client' +import { IRealm } from '../../models/realm' +import { MyWebSocket } from './webSocket' + +export interface IWebSocketServer extends EventEmitter { + readonly path: string +} + +interface IAuthParams { + id?: string + token?: string + key?: string +} + +type CustomConfig = Pick + +const WS_PATH = 'peerjs' + +export class WebSocketServer extends EventEmitter implements IWebSocketServer { + public readonly path: string + private readonly realm: IRealm + private readonly config: CustomConfig + public readonly socketServer: WebSocketLib.Server + + constructor({ server, realm, config }: { server: any; realm: IRealm; config: CustomConfig }) { + super() + + this.setMaxListeners(0) + + this.realm = realm + this.config = config + + const path = this.config.path + this.path = `${path}${path.endsWith('/') ? '' : '/'}${WS_PATH}` + + this.socketServer = new WebSocketLib.Server({ path: this.path, server }) + + this.socketServer.on('connection', (socket: MyWebSocket, req) => this._onSocketConnection(socket, req)) + this.socketServer.on('error', (error: Error) => this._onSocketError(error)) + } + + private _onSocketConnection(socket: MyWebSocket, req: IncomingMessage): void { + const { query = {} } = url.parse(req.url!, true) + + const { token, key }: IAuthParams = query + const { id, idType } = + typeof query.id === 'string' + ? 
{ id: query.id as string, idType: IdType.SELF_ASSIGNED } + : { id: this.getFreeId(this.realm), idType: IdType.SERVER_ASSIGNED } + + if (!id) { + return this._sendErrorAndClose(socket, Errors.NO_AVAILABLE_ID_FOUND) + } + + if (!token || !key) { + return this._sendErrorAndClose(socket, Errors.INVALID_WS_PARAMETERS) + } + + if (key !== this.config.key) { + return this._sendErrorAndClose(socket, Errors.INVALID_KEY) + } + + const client = this.realm.getClientById(id) + + if (client) { + if (token !== client.getToken()) { + // ID-taken, invalid token + socket.send( + JSON.stringify({ + type: MessageType.ID_TAKEN, + payload: { msg: 'ID is taken' } + }) + ) + + return socket.close() + } + + return this._configureWS(socket, client) + } + + if (idType === IdType.SERVER_ASSIGNED) { + socket.send( + JSON.stringify({ + type: MessageType.ASSIGNED_ID, + payload: { id } + }) + ) + } + + this._registerClient({ socket, id, token, idType }) + } + + private getFreeId(realm: IRealm): string | undefined { + let id = this.config.idGenerator() + let currentIterations = 0 + while (realm.hasClient(id)) { + currentIterations++ + if (currentIterations > this.config.maxIdIterations) { + return + } + + id = this.config.idGenerator() + } + + return id + } + + private _onSocketError(error: Error): void { + // handle error + this.emit('error', error) + } + + private generateRandomMessage() { + return Math.random().toString(36).substring(2) + } + + private _registerClient({ + socket, + id, + token, + idType + }: { + socket: MyWebSocket + id: string + token: string + idType: IdType + }): void { + // Check concurrent limit + const clientsCount = this.realm.getClientsIds().length + + if (clientsCount >= this.config.concurrent_limit) { + return this._sendErrorAndClose(socket, Errors.CONNECTION_LIMIT_EXCEED) + } + + const payload = this.generateRandomMessage() + + const newClient: IClient = new Client({ id, token, msg: payload, idType }) + this.realm.setClient(newClient, id) + socket.send(JSON.stringify({ type: MessageType.OPEN, payload })) + + this._configureWS(socket, newClient) + } + + private _configureWS(socket: MyWebSocket, client: IClient): void { + client.setSocket(socket) + + // Cleanup after a socket closes. + socket.on('close', () => { + if (client.getSocket() === socket) { + this.realm.removeClientById(client.getId()) + this.emit('close', client) + } + }) + + // Handle messages from peers. 
+ socket.on('message', (data: WebSocketLib.Data) => { + try { + const message = JSON.parse(data as string) + + message.src = client.getId() + + this.emit('message', client, message) + } catch (e) { + this.emit('error', e) + } + }) + + this.emit('connection', client) + } + + private _sendErrorAndClose(socket: MyWebSocket, msg: Errors): void { + socket.send( + JSON.stringify({ + type: MessageType.ERROR, + payload: { msg } + }) + ) + + socket.close() + } +} diff --git a/comms/lighthouse/src/peerjs-server/services/webSocketServer/webSocket.ts b/comms/lighthouse/src/peerjs-server/services/webSocketServer/webSocket.ts new file mode 100644 index 000000000..980c74db8 --- /dev/null +++ b/comms/lighthouse/src/peerjs-server/services/webSocketServer/webSocket.ts @@ -0,0 +1,4 @@ +import EventEmitter from 'events' +import WebSocketLib from 'ws' + +export type MyWebSocket = WebSocketLib & EventEmitter diff --git a/comms/lighthouse/src/peerjs-server/utils/idgenerator.ts b/comms/lighthouse/src/peerjs-server/utils/idgenerator.ts new file mode 100644 index 000000000..952983d8a --- /dev/null +++ b/comms/lighthouse/src/peerjs-server/utils/idgenerator.ts @@ -0,0 +1,8 @@ +export function numericIdGenerator() { + let currentId = 0 + + return () => { + currentId++ + return currentId.toString() + } +} diff --git a/comms/lighthouse/src/peers/archipelagoService.ts b/comms/lighthouse/src/peers/archipelagoService.ts new file mode 100644 index 000000000..de20c892f --- /dev/null +++ b/comms/lighthouse/src/peers/archipelagoService.ts @@ -0,0 +1,121 @@ +import { + ArchipelagoController, + defaultArchipelagoController, + Island, + IslandUpdates, + PeerPositionChange +} from '@dcl/archipelago' +import { ConfigService, LighthouseConfig } from '../config/configService' +import { metricsComponent } from '../metrics' +import { AppServices } from '../types' +import { PeersService } from './peersService' +import { PeerOutgoingMessageType } from './protocol/messageTypes' + +export class ArchipelagoService { + private readonly controller: ArchipelagoController + private readonly peersServiceGetter: () => PeersService + private readonly configService: ConfigService + + constructor({ configService, peersService }: Pick) { + this.controller = defaultArchipelagoController({ + flushFrequency: configService.get(LighthouseConfig.ARCHIPELAGO_FLUSH_FREQUENCY), + archipelagoParameters: { + joinDistance: configService.get(LighthouseConfig.ARCHIPELAGO_JOIN_DISTANCE), + leaveDistance: configService.get(LighthouseConfig.ARCHIPELAGO_LEAVE_DISTANCE), + maxPeersPerIsland: configService.get(LighthouseConfig.MAX_PEERS_PER_ISLAND) + } + }) + + configService.listenTo(LighthouseConfig.ARCHIPELAGO_JOIN_DISTANCE, (joinDistance) => + this.controller.modifyOptions({ joinDistance }) + ) + configService.listenTo(LighthouseConfig.ARCHIPELAGO_LEAVE_DISTANCE, (leaveDistance) => + this.controller.modifyOptions({ leaveDistance }) + ) + + this.configService = configService + + this.controller.subscribeToUpdates(this.onIslandUpdates.bind(this)) + + this.peersServiceGetter = peersService + } + + updatePeerPosition(peerId: string, positionUpdate: Omit) { + this.controller.setPeersPositions({ ...positionUpdate, id: peerId }) + } + + get peersService() { + return this.peersServiceGetter() + } + + clearPeer(id: string) { + this.controller.clearPeers(id) + } + + async onIslandUpdates(updates: IslandUpdates) { + const cachedIslands: Record = {} + + const getIsland = async (id: string) => { + if (id in cachedIslands) return cachedIslands[id] + + const island = await 
this.controller.getIsland(id) + + if (island) { + cachedIslands[id] = island + return island + } + } + + for (const id in updates) { + const update = updates[id] + + const island = await getIsland(updates[id].islandId) + // This could be undefined for a short lived island, in the round trip between the worker & this service. + if (island) { + switch (update.action) { + case 'changeTo': { + const fromIsland: Island | undefined = update.fromIslandId + ? await getIsland(update.fromIslandId) + : undefined + + this.peersService.notifyIslandChange(id, island, fromIsland) + break + } + case 'leave': { + this.peersService.sendUpdateToIsland(id, island, PeerOutgoingMessageType.PEER_LEFT_ISLAND) + break + } + } + } + } + + try { + metricsComponent.observe('dcl_lighthouse_islands_count', {}, await this.getIslandsCount()) + } catch { + // mordor + } + } + + async areInSameIsland(peerId: string, ...otherPeerIds: string[]) { + const peersData = await this.controller.getPeersData([peerId, ...otherPeerIds]) + const expectedIslandId = peersData[peerId]?.islandId + return !!expectedIslandId && Object.values(peersData).every((data) => data.islandId === expectedIslandId) + } + + async getIslands(): Promise<{ ok: false; message: string } | { ok: true; islands: Island[] }> { + const peersCount = this.peersService.getActivePeersCount() + + if (peersCount >= this.configService.get(LighthouseConfig.HIGH_LOAD_PEERS_COUNT)) + return { ok: false, message: 'Cannot query islands during high load' } + + return { ok: true, islands: await this.controller.getIslands() } + } + + async getIsland(islandId: string): Promise { + return this.controller.getIsland(islandId) + } + + async getIslandsCount(): Promise { + return this.controller.getIslandsCount() + } +} diff --git a/comms/lighthouse/src/peers/auth.ts b/comms/lighthouse/src/peers/auth.ts new file mode 100644 index 000000000..8fad3fbdb --- /dev/null +++ b/comms/lighthouse/src/peers/auth.ts @@ -0,0 +1,59 @@ +import { httpProviderForNetwork } from '@catalyst/contracts' +import { Authenticator } from 'dcl-crypto' +import { IdType, MessageType } from '../peerjs-server/enums' +import { IClient } from '../peerjs-server/models/client' +import { IMessage } from '../peerjs-server/models/message' +import { PeersService } from '../peers/peersService' + +export type AuthHandlerConfiguration = { + noAuth: boolean + peersServiceGetter: () => PeersService + ethNetwork: string +} + +export function peerAuthHandler({ + noAuth, + peersServiceGetter, + ethNetwork +}: AuthHandlerConfiguration): (client: IClient | undefined, message: IMessage) => Promise { + return async (client, message) => { + if (noAuth) { + return true + } + + if (!client) { + // client not registered + return false + } + if ( + client.getIdType() === IdType.SELF_ASSIGNED && + client.getId().toLowerCase() !== message.payload[0]?.payload?.toLowerCase() + ) { + // client id mistmaches with auth signer + return false + } + try { + const provider = httpProviderForNetwork(ethNetwork) + const result = await Authenticator.validateSignature(client.getMsg(), message.payload, provider) + + const address = message.payload[0].payload + + if (!peersServiceGetter().existsPeerWithAddress(address)) { + peersServiceGetter().setPeerAddress(client.getId(), message.payload[0].payload) + } else { + client.send({ + type: MessageType.ID_TAKEN, + payload: { msg: 'ETH Address is taken' } + }) + + client.getSocket()?.close() + return false + } + + return result.ok + } catch (e) { + console.log(`error while recovering address for client 
${client.getId()}`, e) + return false + } + } +} diff --git a/comms/lighthouse/src/idService.ts b/comms/lighthouse/src/peers/idService.ts similarity index 86% rename from comms/lighthouse/src/idService.ts rename to comms/lighthouse/src/peers/idService.ts index a74cb8550..ed930184e 100644 --- a/comms/lighthouse/src/idService.ts +++ b/comms/lighthouse/src/peers/idService.ts @@ -1,4 +1,4 @@ -import { DEFAULT_ID_ALPHABET } from 'decentraland-katalyst-utils/util' +import { DEFAULT_ID_ALPHABET } from '@catalyst/commons' export type IdServiceConfig = { alphabet: string @@ -13,7 +13,7 @@ export class IdService { constructor(config: Partial = {}) { this.config = { alphabet: config.alphabet ?? DEFAULT_ID_ALPHABET, - idLength: config.idLength ?? 2 + idLength: config.idLength ?? 3 } } diff --git a/comms/lighthouse/src/peers/initPeerJsServer.ts b/comms/lighthouse/src/peers/initPeerJsServer.ts new file mode 100644 index 000000000..ceb90d05a --- /dev/null +++ b/comms/lighthouse/src/peers/initPeerJsServer.ts @@ -0,0 +1,48 @@ +import { Express } from 'express' +import { Server } from 'net' +import { LighthouseConfig } from '../config/configService' +import { ExpressPeerServer } from '../peerjs-server' +import { IConfig } from '../peerjs-server/config' +import { AppServices } from '../types' +import { peerAuthHandler } from './auth' +import { PeerMessagesHandler } from './peerMessagesHandler' + +export type PeerJSServerInitOptions = { + netServer: Server + noAuth: boolean + ethNetwork: string + messagesHandler: PeerMessagesHandler +} & AppServices + +export function initPeerJsServer({ + netServer, + idService, + noAuth, + peersService, + archipelagoService, + messagesHandler, + configService, + ethNetwork +}: PeerJSServerInitOptions): Express { + const options: Partial = { + path: '/', + idGenerator: () => idService.nextId(), + authHandler: peerAuthHandler({ noAuth, peersServiceGetter: peersService, ethNetwork }), + concurrent_limit: configService.get(LighthouseConfig.MAX_CONCURRENT_USERS), + transmissionFilter: (src, dst) => archipelagoService().areInSameIsland(src, dst) + } + + const peerServer = ExpressPeerServer(netServer, options) + + peerServer.on('disconnect', (client: any) => { + console.log('User disconnected from server socket. 
Removing from archipelago: ' + client.id) + archipelagoService().clearPeer(client.id) + peersService().clearPeer(client.id) + }) + + peerServer.on('error', console.log) + + peerServer.on('message', messagesHandler as any) + + return peerServer +} diff --git a/comms/lighthouse/src/peers/peerMessagesHandler.ts b/comms/lighthouse/src/peers/peerMessagesHandler.ts new file mode 100644 index 000000000..37ed94379 --- /dev/null +++ b/comms/lighthouse/src/peers/peerMessagesHandler.ts @@ -0,0 +1,33 @@ +import { Position3D } from '@catalyst/commons' +import { IClient } from '../peerjs-server/models/client' +import { AppServices } from '../types' +import { HeartbeatMessage, PeerIncomingMessage, PeerIncomingMessageType } from './protocol/messageTypes' + +export type PeerMessagesHandler = (client: IClient, message: PeerIncomingMessage) => any + +export function defaultPeerMessagesHandler({ peersService, archipelagoService }: AppServices) { + return (client: IClient, message: PeerIncomingMessage) => { + if (client.isAuthenticated()) { + switch (message.type) { + case PeerIncomingMessageType.HEARTBEAT: + handleHeartbeat(message, client) + } + } + } + + function handleHeartbeat(message: HeartbeatMessage, client: IClient) { + const { position, connectedPeerIds, parcel } = message.payload + peersService().updateTopology(client.getId(), connectedPeerIds) + peersService().updatePeerParcel(client.getId(), parcel) + peersService().updatePeerPosition(client.getId(), position) + + if (position) { + const positionUpdate: { position: Position3D; preferedIslandId?: string } = { position } + if ('preferedIslandId' in message.payload) { + positionUpdate.preferedIslandId = message.payload.preferedIslandId + } + + archipelagoService().updatePeerPosition(client.getId(), positionUpdate) + } + } +} diff --git a/comms/lighthouse/src/peers/peersCheckJob.ts b/comms/lighthouse/src/peers/peersCheckJob.ts new file mode 100644 index 000000000..94adf5112 --- /dev/null +++ b/comms/lighthouse/src/peers/peersCheckJob.ts @@ -0,0 +1,42 @@ +/** + * Job that runs periodically to check that every registered peer is actually connected, and clearing those who are not + */ + +import { LighthouseConfig } from '../config/configService' +import { AppServices } from '../types' + +export async function peersCheckJob({ + peersService, + configService +}: Pick) { + let jobTimeoutId: NodeJS.Timeout | number | undefined + + function clearNotConnectedPeers() { + peersService().clearNotConnectedPeers() + } + + return { + start() { + if (jobTimeoutId) return false // Shouldn't start twice + + const schedule = () => { + jobTimeoutId = setTimeout(() => { + console.info('Checking not connected peers') + clearNotConnectedPeers() + schedule() + }, configService.get(LighthouseConfig.PEERS_CHECK_INTERVAL)) + } + + console.info('Starting check peers job') + + schedule() + + return true + }, + stop() { + if (!jobTimeoutId) return false + clearTimeout(jobTimeoutId as any) + return true + } + } +} diff --git a/comms/lighthouse/src/peers/peersService.ts b/comms/lighthouse/src/peers/peersService.ts new file mode 100644 index 000000000..f23a65fe4 --- /dev/null +++ b/comms/lighthouse/src/peers/peersService.ts @@ -0,0 +1,235 @@ +/* eslint-disable @typescript-eslint/ban-types */ +import { discretizedPositionDistanceXZ, PeerConnectionHint, Position3D } from '@catalyst/commons' +import { Island } from '@dcl/archipelago' +import { LighthouseConfig } from '../config/configService' +import { IRealm } from '../peerjs-server' +import { AppServices, PeerInfo, PeerRequest, 
PeerTopologyInfo } from '../types' +import { PeerOutgoingMessage, PeerOutgoingMessageType } from './protocol/messageTypes' + +require('isomorphic-fetch') + +export interface IPeersService { + getPeerInfo(peerId: string): PeerInfo + getPeersInfo(peerIds: string[]): PeerInfo[] + + ensurePeerInfo(peer: PeerRequest): PeerInfo + getOptimalConnectionsFor(peer: PeerInfo, otherPeers: PeerInfo[], maxDistance: number): PeerConnectionHint[] +} + +export class PeersService implements IPeersService { + private peersTopology: Record = {} + + // This structure may contain information of peers that have already disconnected. To know if a peer is disconnected, check the realm + private peers: Record = {} + + constructor( + private realmProvider: () => IRealm, + private services: Pick, + private distanceFunction: (p1: Position3D, p2: Position3D) => number = discretizedPositionDistanceXZ() + ) {} + + sendMessageToPeer(peerId: string, message: Omit) { + const client = this.peerRealm.getClientById(peerId) + + if (client) { + client.send({ + ...message, + dst: peerId, + src: '__lighthouse__' + }) + } + } + + updateTopology(peerId: string, connectedPeerIds: string[]) { + this.peersTopology[peerId] = connectedPeerIds + } + + private get peerRealm() { + return this.realmProvider() + } + + getConnectedPeers(peerId: string): string[] | undefined { + return this.peersTopology[peerId] + } + + setPeerAddress(peerId: string, address: string) { + const peerInfo = this.ensurePeerInfo({ id: peerId }) + peerInfo.address = address + } + + existsPeerWithAddress(address: string) { + return this.realmProvider() + .getClientsIds() + .some((it) => this.getPeerInfo(it)?.address?.toLowerCase() === address.toLowerCase()) + } + + peerExistsInRealm(peerId: string) { + return !!this.peerRealm.getClientById(peerId) + } + + getPeerInfo(peerId: string): PeerInfo { + const client = this.peerRealm.getClientById(peerId) + const peer = this.peers[peerId] ?? { id: peerId } + + if (client) { + peer.lastPing = client.getLastPing() + } + + return peer + } + + getPeersInfo(peerIds?: string[]): PeerInfo[] { + if (!peerIds) peerIds = Object.keys(this.peers) + + return peerIds.map((id) => this.getPeerInfo(id)) + } + + ensurePeerInfo(peer: PeerRequest): PeerInfo { + const peerId = (peer.id ?? peer.peerId)! + const existing = this.peers[peerId] + + if (existing) { + if (peer.protocolVersion) { + existing.protocolVersion = peer.protocolVersion + } + return existing + } else { + this.peers[peerId] = { id: peerId, protocolVersion: peer.protocolVersion } + return this.peers[peerId] + } + } + + updatePeerParcel(peerId: string, parcel?: [number, number]) { + const peerInfo = this.ensurePeerInfo({ id: peerId }) + peerInfo.parcel = parcel + } + + updatePeerPosition(peerId: string, position?: Position3D) { + const peerInfo = this.ensurePeerInfo({ id: peerId }) + peerInfo.position = position + } + + getOptimalConnectionsFor(peer: PeerInfo, otherPeers: PeerInfo[], maxDistance: number): PeerConnectionHint[] { + const hints: PeerConnectionHint[] = [] + + otherPeers.forEach((it) => { + if (it.id !== peer.id && it.position) { + const distance = this.distanceFunction(peer.position!, it.position) + if (distance <= maxDistance) { + hints.push({ + id: it.id, + distance, + position: it.position + }) + } + } + }) + + return ( + hints + .sort((h1, h2) => { + const distanceDiff = h1.distance - h2.distance + // If the distance is the same, we randomize + return distanceDiff === 0 ? 
Math.random() : distanceDiff + }) + // We don't send more than 100 peer positions for now + .slice(0, 100) + ) + } + + sendUpdateToIsland( + peerId: string, + island: Island, + type: PeerOutgoingMessageType.PEER_JOINED_ISLAND | PeerOutgoingMessageType.PEER_LEFT_ISLAND + ) { + const info = this.getPeerInfo(peerId) + + if (!info.position) { + console.warn( + `Tried to send updates of a peer ${peerId} for which we don't have a position. This shouldn't happen.` + ) + return + } + + for (const peer of island.peers) { + if (peer.id !== info.id) { + this.sendMessageToPeer(peer.id, { + type, + payload: { + islandId: island.id, + peer: { id: info.id, position: info.position } + } + }) + } + } + } + + getActivePeersCount() { + return this.peerRealm.getClientsCount() + } + + notifyIslandChange(peerChangingId: string, island: Island, fromIsland: Island | undefined) { + this.sendMessageToPeer(peerChangingId, { + type: PeerOutgoingMessageType.CHANGE_ISLAND, + payload: { + islandId: island.id, + peers: island.peers.map((it) => ({ id: it.id, position: it.position })) + } + }) + + this.sendUpdateToIsland(peerChangingId, island, PeerOutgoingMessageType.PEER_JOINED_ISLAND) + + if (fromIsland) { + this.sendUpdateToIsland(peerChangingId, fromIsland, PeerOutgoingMessageType.PEER_LEFT_ISLAND) + } + } + + getUsersParcels(): [number, number][] { + const result: [number, number][] = [] + + for (const id of this.peerRealm.getClientsIds()) { + const parcel = this.peers[id]?.parcel + if (parcel) { + result.push(parcel) + } + } + + return result + } + + getConnectedPeersInfo(): { ok: true; peers: PeerInfo[] } | { ok: false; message: string } { + const peersCount = this.getActivePeersCount() + + if (peersCount >= this.services.configService.get(LighthouseConfig.HIGH_LOAD_PEERS_COUNT)) + return { ok: false, message: 'Cannot query peers during high load' } + + return { ok: true, peers: this.getPeersInfo(this.realmProvider().getClientsIds()) } + } + + clearPeer(peerId: string) { + delete this.peers[peerId] + delete this.peersTopology[peerId] + } + + clearNotConnectedPeers() { + for (const id in this.peers) { + if (!this.realmProvider().hasClient(id)) { + console.warn(`Clearing peer ${id} because it wasn't connected to the lighthouse`) + this.clearPeer(id) + this.services.archipelagoService().clearPeer(id) + } + } + } + + getTopology(): { ok: true; topology: PeerTopologyInfo[] } | { ok: false; message: string } { + const peersCount = this.getActivePeersCount() + + if (peersCount >= this.services.configService.get(LighthouseConfig.HIGH_LOAD_PEERS_COUNT)) + return { ok: false, message: 'Cannot query topology during high load' } + + return { + ok: true, + topology: Object.entries(this.peersTopology).map(([id, connectedPeers]) => ({ id, connectedPeers })) + } + } +} diff --git a/comms/lighthouse/src/peers/protocol/messageTypes.ts b/comms/lighthouse/src/peers/protocol/messageTypes.ts new file mode 100644 index 000000000..598f2cc3a --- /dev/null +++ b/comms/lighthouse/src/peers/protocol/messageTypes.ts @@ -0,0 +1,59 @@ +import { Position3D } from '@catalyst/commons' + +// OUTGOING +export enum PeerOutgoingMessageType { + PEER_LEFT_ISLAND = 'PEER_LEFT_ISLAND', + PEER_JOINED_ISLAND = 'PEER_JOINED_ISLAND', + OPTIMAL_NETWORK_RESPONSE = 'OPTIMAL_NETWORK_RESPONSE', + CHANGE_ISLAND = 'CHANGE_ISLAND' +} + +export type PeerWithPosition = { + id: string + position: [number, number, number] +} + +export type ChangeIsland = { + type: PeerOutgoingMessageType.CHANGE_ISLAND + payload: { + islandId: string + peers: PeerWithPosition[] + } +} + 
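+// Notifications broadcast to the other members of an island when a peer joins or leaves it.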
+export type PeerJoinedIsland = { + type: PeerOutgoingMessageType.PEER_LEFT_ISLAND + payload: { + islandId: string + peer: PeerWithPosition + } +} + +export type PeerLeftIsland = { + type: PeerOutgoingMessageType.PEER_JOINED_ISLAND + payload: { + islandId: string + peer: PeerWithPosition + } +} + +export type PeerOutgoingMessageContent = ChangeIsland | PeerJoinedIsland | PeerLeftIsland + +export type PeerOutgoingMessage = { readonly src: string; readonly dst: string } & PeerOutgoingMessageContent + +// INCOMING +export enum PeerIncomingMessageType { + HEARTBEAT = 'HEARTBEAT' +} + +export type HeartbeatMessage = { + type: PeerIncomingMessageType.HEARTBEAT + payload: { + connectedPeerIds: string[] + parcel?: [number, number] + position?: Position3D + preferedIslandId?: string + } +} + +export type PeerIncomingMessage = HeartbeatMessage diff --git a/comms/lighthouse/src/peersService.ts b/comms/lighthouse/src/peersService.ts deleted file mode 100644 index 00679ee98..000000000 --- a/comms/lighthouse/src/peersService.ts +++ /dev/null @@ -1,165 +0,0 @@ -/* eslint-disable @typescript-eslint/ban-types */ -import { discretizedPositionDistance, PeerConnectionHint, Position } from 'decentraland-katalyst-utils/Positions' -import { IRealm } from 'peerjs-server' -import { PeerInfo, PeerRequest } from './types' - -export enum NotificationType { - PEER_LEFT_ROOM = 'PEER_LEFT_ROOM', - PEER_LEFT_LAYER = 'PEER_LEFT_LAYER', - PEER_JOINED_LAYER = 'PEER_JOINED_LAYER', - PEER_JOINED_ROOM = 'PEER_JOINED_ROOM' -} - -require('isomorphic-fetch') - -export interface IPeersService { - notifyPeersById(peerIds: string[], type: NotificationType, payload: object): void - - getPeerInfo(peerId: string): PeerInfo - getPeersInfo(peerIds: string[]): PeerInfo[] - - ensurePeerInfo(peer: PeerRequest): PeerInfo - getOptimalConnectionsFor( - peer: PeerInfo, - otherPeers: PeerInfo[], - targetConnections: number, - maxDistance: number - ): PeerConnectionHint[] -} - -export class PeersService implements IPeersService { - private peersTopology: Record = {} - - // This structure contains information of all peers, even those that have disconnected. 
To know if a peer is disconnected, check the realm - private peers: Record = {} - - constructor( - private realmProvider: () => IRealm, - private distanceFunction: (p1: Position, p2: Position) => number = discretizedPositionDistance() - ) {} - - notifyPeers(peers: PeerInfo[], type: NotificationType, payload: object) { - this.notifyPeersById( - peers.map((it) => it.id), - type, - payload - ) - } - - notifyPeersById(peerIds: string[], type: NotificationType, payload: object) { - console.log(`Sending ${type} notification to: `, peerIds) - peerIds.forEach((id) => { - const client = this.peerRealm!.getClientById(id) - if (client) { - client.send({ - type, - src: '__lighthouse_notification__', - dst: id, - payload - }) - } - }) - } - - updateTopology(peerId: string, connectedPeerIds: string[]) { - this.peersTopology[peerId] = connectedPeerIds - } - - private get peerRealm() { - return this.realmProvider() - } - - getConnectedPeers(peerId: string): string[] | undefined { - return this.peersTopology[peerId] - } - - setPeerAddress(peerId: string, address: string) { - const peerInfo = this.ensurePeerInfo({ id: peerId }) - peerInfo.address = address - } - - existsPeerWithAddress(address: string) { - return this.realmProvider() - .getClientsIds() - .some((it) => this.getPeerInfo(it)?.address?.toLowerCase() === address.toLowerCase()) - } - - peerExistsInRealm(peerId: string) { - return !!this.peerRealm.getClientById(peerId) - } - - getPeerInfo(peerId: string): PeerInfo { - const client = this.peerRealm.getClientById(peerId) - const peer = this.peers[peerId] ?? { id: peerId } - - if (client) { - peer.lastPing = client.getLastPing() - } - - return peer - } - - getPeersInfo(peerIds: string[]): PeerInfo[] { - return peerIds.map((id) => this.getPeerInfo(id)) - } - - ensurePeerInfo(peer: PeerRequest): PeerInfo { - const peerId = (peer.id ?? peer.peerId)! - const existing = this.peers[peerId] - - if (existing) { - if (peer.protocolVersion) { - existing.protocolVersion = peer.protocolVersion - } - return existing - } else { - this.peers[peerId] = { id: peerId, protocolVersion: peer.protocolVersion } - return this.peers[peerId] - } - } - - updatePeerParcel(peerId: string, parcel?: [number, number]) { - if (this.peers[peerId]) { - this.peers[peerId].parcel = parcel - } - } - - updatePeerPosition(peerId: string, position?: Position) { - if (this.peers[peerId]) { - this.peers[peerId].position = position - } - } - - getOptimalConnectionsFor( - peer: PeerInfo, - otherPeers: PeerInfo[], - targetConnections: number, - maxDistance: number - ): PeerConnectionHint[] { - const hints: PeerConnectionHint[] = [] - - otherPeers.forEach((it) => { - if (it.id !== peer.id && it.position) { - const distance = this.distanceFunction(peer.position!, it.position) - if (distance <= maxDistance) { - hints.push({ - id: it.id, - distance, - position: it.position - }) - } - } - }) - - return ( - hints - .sort((h1, h2) => { - const distanceDiff = h1.distance - h2.distance - // If the distance is the same, we randomize - return distanceDiff === 0 ? 
Math.random() : distanceDiff - }) - // We don't send more than 100 peer positions for now - .slice(0, 100) - ) - } -} diff --git a/comms/lighthouse/src/roomsService.ts b/comms/lighthouse/src/roomsService.ts deleted file mode 100644 index b1f541e82..000000000 --- a/comms/lighthouse/src/roomsService.ts +++ /dev/null @@ -1,74 +0,0 @@ -import { IPeersService, NotificationType } from './peersService' -import { PeerInfo, Room } from './types' -import { removePeerAndNotify } from './utils' - -type RoomsFilter = Partial<{ - peerId: string -}> - -type RoomsServiceConfig = { - peersService: IPeersService -} - -function newRoom(roomId: string): Room { - return { id: roomId, peers: [] } -} - -export class RoomsService { - constructor(layer: string, private rooms: Record, private config: RoomsServiceConfig) {} - - private get peersService() { - return this.config.peersService - } - - getRoomIds(filter?: RoomsFilter): string[] { - const peerId = filter?.peerId - - return peerId - ? Object.entries(this.rooms) - .filter(([, room]) => room.peers.includes(peerId)) - .map(([id]) => id) - : Object.keys(this.rooms) - } - - getPeers(roomId: string): PeerInfo[] { - return this.peersService.getPeersInfo(this.rooms[roomId]?.peers) - } - - async addPeerToRoom(roomId: string, peerId: string) { - let room = this.rooms[roomId] - - if (!room) { - this.rooms[roomId] = room = newRoom(roomId) - } - - if (!room.peers.includes(peerId)) { - const peersToNotify = room.peers.slice() - room.peers.push(peerId) - this.config.peersService?.notifyPeersById(peersToNotify, NotificationType.PEER_JOINED_ROOM, { - id: peerId, - userId: peerId, - peerId, - roomId - }) - } - - return room - } - - removePeerFromRoom(roomId: string, peerId: string) { - const { container } = removePeerAndNotify( - this.rooms, - roomId, - peerId, - NotificationType.PEER_LEFT_ROOM, - 'roomId', - this.peersService - ) - return container - } - - removePeer(peerId: string) { - Object.keys(this.rooms).forEach((room) => this.removePeerFromRoom(room, peerId)) - } -} diff --git a/comms/lighthouse/src/routes.ts b/comms/lighthouse/src/routes.ts index 6272d6d43..cf9ba25e6 100644 --- a/comms/lighthouse/src/routes.ts +++ b/comms/lighthouse/src/routes.ts @@ -1,13 +1,10 @@ -import { validateSignatureHandler } from 'decentraland-katalyst-commons/handlers' -import { Metrics } from 'decentraland-katalyst-commons/metrics' -import express, { RequestHandler } from 'express' -import { IRealm } from 'peerjs-server' -import { ConfigService } from './configService' -import { RequestError } from './errors' -import { requireAll, requireOneOf, validatePeerToken } from './handlers' -import { LayersService } from './layersService' -import { PeersService } from './peersService' -import { Layer, PeerInfo } from './types' +import { asyncHandler, validateSignatureFromHeaderHandler, validateSignatureHandler } from '@catalyst/commons' +import { Island, PeerData } from '@dcl/archipelago' +import express, { Request, Response } from 'express' +import { LighthouseConfig } from './config/configService' +import { toGraphviz } from './misc/graphvizTopology' +import { requireAll } from './misc/handlers' +import { AppServices, PeerInfo } from './types' export type RoutesOptions = { env?: any @@ -17,240 +14,129 @@ export type RoutesOptions = { restrictedAccessSigner: string } -export type Services = { - layersService: LayersService - realmProvider: () => IRealm - peersService: PeersService - configService: ConfigService -} - -export function configureRoutes(app: express.Express, services: Services, 
options: RoutesOptions) { - const { layersService, realmProvider: getPeerJsRealm, peersService, configService } = services - - const validateLayerExists = (req, res, next) => { - if (layersService.exists(req.params.layerId)) { - next() - } else { - res.status(404).send({ status: 'layer-not-found' }) - } - } +export function configureRoutes( + app: express.Express, + services: Pick, + options: RoutesOptions +) { + const { configService } = services - const getStatus: RequestHandler = async (req, res) => { + const getStatus = async (req: Request, res: Response) => { const status: any = { name: options.name, version: options.version, currenTime: Date.now(), env: options.env, - ready: true + ready: true, + usersCount: services.peersService().getActivePeersCount(), + islandsCount: await services.archipelagoService().getIslandsCount(), + maxUsers: configService.get(LighthouseConfig.MAX_CONCURRENT_USERS) } - const globalMaxPerLayer = await configService.getMaxPeersPerLayer() - - if (req.query.includeLayers === 'true') { - status.layers = layersService.getLayers().map((it) => mapLayerToJson(it, globalMaxPerLayer, true)) + if (req.query.includeUsersParcels) { + status.usersParcels = services.peersService().getUsersParcels() } res.send(status) } - const getLayers: RequestHandler = async (req, res) => { - const globalMaxPerLayer = await configService.getMaxPeersPerLayer() - res.send( - layersService.getLayers().map((it) => mapLayerToJson(it, globalMaxPerLayer, req.query.usersParcels === 'true')) - ) + const putConfig = async (req: Request, res: Response) => { + const configKeyValues = req.body.config + if (!Array.isArray(configKeyValues) || configKeyValues.some((it) => !it.key)) { + res.status(400).send( + JSON.stringify({ + status: 'bad-request', + message: 'Expected array body with {key: string, value?: string} elements' + }) + ) + } else { + const config = await configService.updateStorageConfigs(configKeyValues) + res.send(config) + } } - const getByLayerId = async (req, res) => { - const globalMaxPerLayer = await configService.getMaxPeersPerLayer() - res.send(mapLayerToJson(layersService.getLayer(req.params.layerId)!, globalMaxPerLayer)) + const getConfig = async (_req: Request, res: Response) => { + const config = configService.getAllConfig() + res.send(config) } - const GetUsersByLayerId = (req, res) => { - res.send(mapUsersToJson(layersService.getLayerPeers(req.params.layerId))) - } + function toSimpleIsland(island: Island) { + function toPeerInfo(peer: PeerData): PeerInfo & { preferedIslandId?: string } { + const info = services.peersService().getPeerInfo(peer.id) - const getRoomsByLayerId = (req, res) => { - res.send(layersService.getRoomsService(req.params.layerId)!.getRoomIds({ peerId: req.query.userId })) - } + return { ...info, preferedIslandId: peer.preferedIslandId } + } - const getRoomId = (req, res) => { - const roomUsers = layersService.getRoomsService(req.params.layerId)!.getPeers(req.params.roomId) - if (typeof roomUsers === 'undefined') { - res.status(404).send({ status: 'room-not-found' }) - } else { - res.send(mapUsersToJson(roomUsers)) + return { + id: island.id, + peers: island.peers.map(toPeerInfo), + maxPeers: island.maxPeers, + center: island.center, + radius: island.radius } } - const putLayerId = async (req, res, next) => { - const { layerId } = req.params - try { - const layer = await layersService.setPeerLayer(layerId, req.body) - res.send(mapUsersToJson(peersService.getPeersInfo(layer.peers))) - } catch (err) { - handleError(err, res, next) + const getIslands = async 
(_req: Request, res: Response) => { + const islandsResponse = await services.archipelagoService().getIslands() + if (islandsResponse.ok) { + res.send({ ...islandsResponse, islands: islandsResponse.islands.map(toSimpleIsland) }) + } else { + res.send(islandsResponse) } } - const putRoomId = async (req, res, next) => { - const { layerId, roomId } = req.params - try { - const room = await layersService.addPeerToRoom(layerId, roomId, req.body) - res.send(mapUsersToJson(peersService.getPeersInfo(room.peers))) - } catch (err) { - handleError(err, res, next) + const getIsland = async (req: Request, res: Response) => { + const island = await services.archipelagoService().getIsland(req.params.islandId) + + if (island) { + res.send(toSimpleIsland(island)) + } else { + res.status(404).send({ status: 'not-found' }) } } - const deleteUserFromRoomById = (req, res) => { - const { roomId, userId, layerId } = req.params - const room = layersService.getRoomsService(layerId)?.removePeerFromRoom(roomId, userId) - res.send(mapUsersToJson(peersService.getPeersInfo(room?.peers ?? []))) - } + const getPeers = async (_req: Request, res: Response) => { + const peersResponse = services.peersService().getConnectedPeersInfo() - const deleteUserId = (req, res) => { - const { userId, layerId } = req.params - const layer = layersService.removePeerFromLayer(layerId, userId) - res.send(mapUsersToJson(peersService.getPeersInfo(layer?.peers ?? []))) + res.send(peersResponse) } - const getTopology = (req, res) => { - const { layerId } = req.params - const topologyInfo = layersService.getLayerTopology(layerId) - if (req.query.format === 'graphviz') { - res.send(` - strict digraph graphName { - concentrate=true - ${topologyInfo - .map((it) => `"${it.id}"[label="${it.id}\\nconns:${it.connectedPeerIds?.length ?? 0}"];`) - .join('\n')} - ${topologyInfo - .map((it) => - it.connectedPeerIds?.length - ? 
it.connectedPeerIds.map((connected) => `"${it.id}"->"${connected}";`).join('\n') - : `"${it.id}";` - ) - .join('\n')} - }`) - } else { - res.send(topologyInfo) - } + const getPeerParameters = async (_req: Request, res: Response) => { + const config = services.configService.get(LighthouseConfig.PEER_PARAMETERS) + + res.send(config) } - const putConfig = async (req, res) => { - const configKeyValues = req.body.config - if (!Array.isArray(configKeyValues) || configKeyValues.some((it) => !it.key)) { - res.status(400).send( - JSON.stringify({ - status: 'bad-request', - message: 'Expected array body with {key: string, value?: string} elements' - }) - ) + const getTopology = async (req: Request, res: Response) => { + const topologyInfo = services.peersService().getTopology() + if (topologyInfo.ok) { + if (req.query.format === 'graphviz') { + res.setHeader('Content-Type', 'text/vnd.graphviz') + res.send(toGraphviz(topologyInfo.topology)) + } else { + res.send(topologyInfo) + } } else { - const config = await configService.updateConfigs(configKeyValues) - res.send(config) + res.status(503).send(topologyInfo) } } - registerRoute(app, '/status', HttpMethod.GET, [getStatus]) - registerRoute(app, '/layers', HttpMethod.GET, [getLayers]) - registerRoute(app, '/layers/:layerId', HttpMethod.GET, [validateLayerExists, getByLayerId]) - registerRoute(app, '/layers/:layerId/users', HttpMethod.GET, [validateLayerExists, GetUsersByLayerId]) - registerRoute(app, '/layers/:layerId/rooms', HttpMethod.GET, [validateLayerExists, getRoomsByLayerId]) - registerRoute(app, '/layers/:layerId/rooms/:roomId', HttpMethod.GET, [validateLayerExists, getRoomId]) - registerRoute(app, '/layers/:layerId', HttpMethod.PUT, [ - requireOneOf(['id', 'peerId'], (req) => req.body), - validatePeerToken(getPeerJsRealm), - putLayerId - ]) - registerRoute(app, '/layers/:layerId/rooms/:roomId', HttpMethod.PUT, [ - validateLayerExists, - requireOneOf(['id', 'peerId'], (req) => req.body), - validatePeerToken(getPeerJsRealm), - putRoomId - ]) - registerRoute(app, '/layers/:layerId/rooms/:roomId/users/:userId', HttpMethod.DELETE, [ - validateLayerExists, - validatePeerToken(getPeerJsRealm), - deleteUserFromRoomById - ]) - registerRoute(app, '/layers/:layerId/users/:userId', HttpMethod.DELETE, [ - validateLayerExists, - validatePeerToken(getPeerJsRealm), - deleteUserId - ]) - registerRoute(app, '/layers/:layerId/topology', HttpMethod.GET, [validateLayerExists, getTopology]) + app.get('/status', asyncHandler(getStatus)) - registerRoute(app, '/config', HttpMethod.PUT, [ + app.put('/config', [ requireAll(['config'], (req) => req.body), validateSignatureHandler( (body) => JSON.stringify(body.config), options.ethNetwork, (signer) => signer?.toLowerCase() == options.restrictedAccessSigner.toLowerCase() ), - putConfig + asyncHandler(putConfig) ]) - function registerRoute(app: express.Express, route: string, method: HttpMethod, actions: RequestHandler[]) { - const handlers: RequestHandler[] = [...Metrics.requestHandlers(), ...actions] - switch (method) { - case HttpMethod.GET: - app.get(route, handlers) - break - case HttpMethod.PUT: - app.put(route, handlers) - break - case HttpMethod.DELETE: - app.delete(route, handlers) - break - } - } - - function mapLayerToJson(layer: Layer, globalMaxPerLayer: number | undefined, includeUserParcels: boolean = false) { - return { - name: layer.id, - usersCount: layer.peers.length, - maxUsers: layer.maxPeers ?? 
globalMaxPerLayer, - ...(includeUserParcels && { - usersParcels: layer.peers.map((it) => peersService.getPeerInfo(it).parcel).filter((it) => !!it) - }) - } - } - - function handleError(err: any, res, next) { - const statusTexts = { - 400: 'bad-request', - 401: 'unauthorized', - 402: 'method-not-allowed', - 403: 'forbidden', - 404: 'not-found' - } - - if (err instanceof RequestError) { - res - .status(err.status) - .send(JSON.stringify({ status: err.statusMessage ?? statusTexts[err.status] ?? 'error', message: err.message })) - } else { - next(err) - } - } - - function mapUsersToJson(user?: PeerInfo[]) { - return user?.map((it) => ({ - id: it.id, - userId: it.id, - protocolVersion: it.protocolVersion, - peerId: it.id, - parcel: it.parcel, - position: it.position, - lastPing: it.lastPing, - address: it.address - })) - } -} - -enum HttpMethod { - GET, - PUT, - DELETE + app.get('/config', asyncHandler(getConfig)) + app.get('/islands', asyncHandler(getIslands)) + app.get('/islands/:islandId', asyncHandler(getIsland)) + app.get('/peers', asyncHandler(getPeers)) + app.get('/peers/topology', asyncHandler(getTopology)) + app.get('/peer-parameters', validateSignatureFromHeaderHandler(options.ethNetwork), asyncHandler(getPeerParameters)) } diff --git a/comms/lighthouse/src/server.ts b/comms/lighthouse/src/server.ts index 643a72b89..e8c579946 100644 --- a/comms/lighthouse/src/server.ts +++ b/comms/lighthouse/src/server.ts @@ -1,35 +1,32 @@ -/* eslint-disable @typescript-eslint/ban-ts-comment */ +import { authHeaders, DAOContractClient, DECENTRALAND_ADDRESS, initializeMetricsServer } from '@catalyst/commons' +import { DAOContract } from '@catalyst/contracts' +import { COMMS_API } from '@dcl/catalyst-api-specs' import cors from 'cors' -import { Authenticator } from 'dcl-crypto' -import { DECENTRALAND_ADDRESS } from 'decentraland-katalyst-commons/addresses' -import { DAOContractClient } from 'decentraland-katalyst-commons/DAOClient' -import { Metrics } from 'decentraland-katalyst-commons/metrics' -import { DAOContract } from 'decentraland-katalyst-contracts/DAOContract' -import { httpProviderForNetwork } from 'decentraland-katalyst-contracts/utils' import express from 'express' +import * as OpenApiValidator from 'express-openapi-validator' import morgan from 'morgan' import * as path from 'path' -import { ExpressPeerServer, IRealm } from 'peerjs-server' -import { IConfig } from 'peerjs-server/dist/src/config' -import { IdType, MessageType } from 'peerjs-server/dist/src/enums' -import { IClient } from 'peerjs-server/dist/src/models/client' -import { IMessage } from 'peerjs-server/dist/src/models/message' -import { ConfigService } from './configService' -import { DEFAULT_LAYERS } from './default_layers' -import { IdService } from './idService' -import { LayersService } from './layersService' -import { patchLog } from './logging' -import { pickName } from './naming' -import { PeersService } from './peersService' +import { ConfigService } from './config/configService' +import { lighthouseConfigStorage } from './config/simpleStorage' +import { metricsComponent } from './metrics' +import { patchLog } from './misc/logging' +import { pickName } from './misc/naming' +import { IRealm } from './peerjs-server' +import { ArchipelagoService } from './peers/archipelagoService' +import { IdService } from './peers/idService' +import { initPeerJsServer } from './peers/initPeerJsServer' +import { defaultPeerMessagesHandler } from './peers/peerMessagesHandler' +import { peersCheckJob } from './peers/peersCheckJob' +import { 
PeersService } from './peers/peersService' import { configureRoutes } from './routes' -import { lighthouseConfigStorage } from './simpleStorage' +import { AppServices } from './types' -const LIGHTHOUSE_VERSION = '0.2' +const LIGHTHOUSE_PROTOCOL_VERSION = '1.0.0' const DEFAULT_ETH_NETWORK = 'ropsten' const CURRENT_ETH_NETWORK = process.env.ETH_NETWORK ?? DEFAULT_ETH_NETWORK -;(async function () { +async function main() { const daoClient = new DAOContractClient(DAOContract.withNetwork(CURRENT_ETH_NETWORK)) const name = await pickName(process.env.LIGHTHOUSE_NAMES, daoClient) @@ -41,9 +38,7 @@ const CURRENT_ETH_NETWORK = process.env.ETH_NETWORK ?? DEFAULT_ETH_NETWORK const port = parseInt(process.env.PORT ?? '9000') const noAuth = parseBoolean(process.env.NO_AUTH ?? 'false') const secure = parseBoolean(process.env.SECURE ?? 'false') - const enableMetrics = parseBoolean(process.env.METRICS ?? 'true') - const allowNewLayers = parseBoolean(process.env.ALLOW_NEW_LAYERS ?? 'false') - const existingLayers = process.env.DEFAULT_LAYERS?.split(',').map((it) => it.trim()) ?? DEFAULT_LAYERS + const validateAPI = parseBoolean(process.env.VALIDATE_API ?? 'false') const idAlphabet = process.env.ID_ALPHABET ? process.env.ID_ALPHABET : undefined const idLength = process.env.ID_LENGTH ? parseInt(process.env.ID_LENGTH) : undefined const restrictedAccessAddress = process.env.RESTRICTED_ACCESS_ADDRESS ?? DECENTRALAND_ADDRESS @@ -54,131 +49,88 @@ const CURRENT_ETH_NETWORK = process.env.ETH_NETWORK ?? DEFAULT_ETH_NETWORK const app = express() - if (enableMetrics) { - Metrics.initialize() + const corsOptions: cors.CorsOptions = { + origin: true, + methods: 'GET,HEAD,POST,PUT,DELETE,CONNECT,TRACE,PATCH', + allowedHeaders: ['Cache-Control', 'Content-Type', 'Origin', 'Accept', 'User-Agent', 'X-Peer-Token', ...authHeaders], + credentials: true, + maxAge: 86400 } - const peersService = new PeersService(getPeerJsRealm) - - app.use(cors()) + app.use(cors(corsOptions)) app.use(express.json()) if (accessLogs) { app.use(morgan('combined')) } + if (validateAPI) { + OpenApiValidator.middleware({ + apiSpec: COMMS_API, + validateResponses: process.env.CI == 'true', + validateRequests: true // (default) + }) + } - const configService = new ConfigService(lighthouseConfigStorage) - - const layersService = new LayersService({ peersService, existingLayers, allowNewLayers, configService }) + const configService = await ConfigService.build({ + storage: lighthouseConfigStorage, + globalConfig: { ethNetwork: CURRENT_ETH_NETWORK } + }) const idService = new IdService({ alphabet: idAlphabet, idLength }) - configureRoutes( - app, - { layersService, realmProvider: getPeerJsRealm, peersService, configService }, - { - name, - version: LIGHTHOUSE_VERSION, - ethNetwork: CURRENT_ETH_NETWORK, - restrictedAccessSigner: restrictedAccessAddress, - env: { - secure, - commitHash: process.env.COMMIT_HASH, - catalystVersion: process.env.CATALYST_VERSION - } - } - ) - const server = app.listen(port, async () => { console.info(`==> Lighthouse listening on port ${port}.`) }) - const options: Partial = { - path: '/', - idGenerator: () => idService.nextId(), - authHandler: async (client, message) => { - if (noAuth) { - return true - } - - if (!client) { - // client not registered - return false - } - if ( - client.getIdType() === IdType.SELF_ASSIGNED && - client.getId().toLowerCase() !== message.payload[0]?.payload?.toLowerCase() - ) { - // client id mistmaches with auth signer - return false - } - try { - const provider = 
httpProviderForNetwork(CURRENT_ETH_NETWORK) - const result = await Authenticator.validateSignature(client.getMsg(), message.payload, provider) - - const address = message.payload[0].payload - - if (!peersService.existsPeerWithAddress(address)) { - peersService.setPeerAddress(client.getId(), message.payload[0].payload) - } else { - client.send({ - type: MessageType.ID_TAKEN, - payload: { msg: 'ETH Address is taken' } - }) - - await client.getSocket()?.close() - return false - } - - return result.ok - } catch (e) { - console.log(`error while recovering address for client ${client.getId()}`, e) - return false - } - } + const appServices: AppServices = { + peersService: () => peersService, + archipelagoService: () => archipelagoService, + configService, + idService } - const peerServer = ExpressPeerServer(server, options) - - peerServer.on('disconnect', (client: any) => { - console.log('User disconnected from server socket. Removing from all rooms & layers: ' + client.id) - layersService.removePeer(client.id) - }) - - peerServer.on('error', console.log) - - //@ts-ignore - peerServer.on('message', (client: IClient, message: IMessage) => { - if (message.type === MessageType.HEARTBEAT && client.isAuthenticated()) { - peersService.updateTopology(client.getId(), message.payload?.connectedPeerIds) - peersService.updatePeerParcel(client.getId(), message.payload?.parcel) - peersService.updatePeerPosition(client.getId(), message.payload?.position) - - if (message.payload?.optimizeNetwork) { - const optimalConnectionsResult = layersService.getOptimalConnectionsFor( - client.getId(), - message.payload.targetConnections, - message.payload.maxDistance - ) - client.send({ - type: 'OPTIMAL_NETWORK_RESPONSE', - src: '__lighthouse_response__', - dst: client.getId(), - payload: optimalConnectionsResult - }) - } - } + const peerServer = initPeerJsServer({ + netServer: server, + noAuth, + ethNetwork: CURRENT_ETH_NETWORK, + messagesHandler: defaultPeerMessagesHandler(appServices), + ...appServices }) function getPeerJsRealm(): IRealm { return peerServer.get('peerjs-realm') } + const metricsServer = initializeMetricsServer(app, metricsComponent) + await metricsServer.start() + + const peersService = new PeersService(getPeerJsRealm, appServices) + + const archipelagoService = new ArchipelagoService(appServices) + + configureRoutes(app, appServices, { + name, + version: LIGHTHOUSE_PROTOCOL_VERSION, + ethNetwork: CURRENT_ETH_NETWORK, + restrictedAccessSigner: restrictedAccessAddress, + env: { + secure, + commitHash: process.env.COMMIT_HASH, + catalystVersion: process.env.CATALYST_VERSION + } + }) + app.use('/peerjs', peerServer) const _static = path.join(__dirname, '../static') app.use('/monitor', express.static(_static + '/monitor')) -})().catch((e) => { + + const peersCheckJobInstance = await peersCheckJob(appServices) + + peersCheckJobInstance.start() +} + +main().catch((e) => { console.error('Exiting process because of unhandled exception', e) process.exit(1) }) diff --git a/comms/lighthouse/src/storageKeys.ts b/comms/lighthouse/src/storageKeys.ts deleted file mode 100644 index 08bf1fb94..000000000 --- a/comms/lighthouse/src/storageKeys.ts +++ /dev/null @@ -1,3 +0,0 @@ -export const StorageKeys = { - PEER_TOKEN: 'peerToken' -} diff --git a/comms/lighthouse/src/types.ts b/comms/lighthouse/src/types.ts index 2a1f75bb1..703ee9df2 100644 --- a/comms/lighthouse/src/types.ts +++ b/comms/lighthouse/src/types.ts @@ -1,11 +1,15 @@ -import { Position } from 'decentraland-katalyst-utils/Positions' +import { Position3D } from 
'@catalyst/commons' +import { ConfigService } from './config/configService' +import { ArchipelagoService } from './peers/archipelagoService' +import { IdService } from './peers/idService' +import { PeersService } from './peers/peersService' export type PeerInfo = { id: string address?: string protocolVersion?: number parcel?: [number, number] - position?: Position + position?: Position3D layer?: string lastPing?: number } @@ -29,3 +33,23 @@ export type Layer = { maxPeers?: number lastCheckTimestamp: number } + +export type AppServices = { + idService: IdService + configService: ConfigService + peersService: () => PeersService + archipelagoService: () => ArchipelagoService +} + +export interface RTCIceServer { + credential?: string + credentialType?: RTCIceCredentialType + urls: string | string[] + username?: string +} + +export type PeerParameters = Partial<{ + iceServers: RTCIceServer[] +}> + +export type PeerTopologyInfo = { id: string; connectedPeers: string[] } diff --git a/comms/lighthouse/src/utils.ts b/comms/lighthouse/src/utils.ts deleted file mode 100644 index 56fc0d7ef..000000000 --- a/comms/lighthouse/src/utils.ts +++ /dev/null @@ -1,44 +0,0 @@ -import { IPeersService, NotificationType } from './peersService' -import { PeerRequest } from './types' - -type PeerContainer = { - peers: string[] -} - -//This function seems to signal the need for an abstraction, but it may be added later in a refactor -export function removePeerAndNotify( - containers: Record, - containerId: string, - peerId: string, - notificationType: NotificationType, - containerKey: string, - peersService?: IPeersService, - deleteIfEmpty: boolean = true -): { container: T; removed: boolean } { - const container = containers[containerId] - let removed = false - if (container) { - const index = container.peers.indexOf(peerId) - if (index !== -1) { - container.peers.splice(index, 1) - removed = true - - peersService?.notifyPeersById(container.peers, notificationType, { - id: peerId, - userId: peerId, - peerId, - [containerKey]: containerId - }) - } - - if (container.peers.length === 0 && deleteIfEmpty) { - delete containers[containerId] - } - } - - return { container, removed } -} - -export function getPeerId(peer: PeerRequest): string { - return (peer.id ?? peer.peerId)! -} diff --git a/comms/lighthouse/static/monitor/index.html b/comms/lighthouse/static/monitor/index.html deleted file mode 100644 index b842f7eb8..000000000 --- a/comms/lighthouse/static/monitor/index.html +++ /dev/null @@ -1,12 +0,0 @@ - - - - - Decentraland Lighthouse - - - -
- - - diff --git a/comms/lighthouse/static/monitor/src/monitor.tsx b/comms/lighthouse/static/monitor/src/monitor.tsx deleted file mode 100644 index e7f54bb0b..000000000 --- a/comms/lighthouse/static/monitor/src/monitor.tsx +++ /dev/null @@ -1,90 +0,0 @@ -import React, { useState, useEffect } from 'react' -import ReactDOM from 'react-dom' - -import Viz from 'viz.js' -import { Module, render } from 'viz.js/full.render.js' - -let viz = new Viz({ Module, render }) - -type Layer = { - name: string - maxUsers: number - usersCount: number -} - -function LayerSelector(props: { layers: Layer[]; onSelected: (layer: Layer) => any }) { - return ( -
- -
- ) -} - -function LayerTopologyViewer(props: { layer: Layer }) { - const [topology, setTopology] = useState(undefined) - useEffect(() => { - ;(async () => { - const topologyResponse = await fetch(`../layers/${props.layer.name}/topology?format=graphviz`) - const topologyText = await topologyResponse.text() - - setTopology(topologyText) - })() - }, []) - - useEffect(() => { - if (topology) { - viz - .renderSVGElement(topology) - .then((element) => document.getElementById('viz-container')!.appendChild(element)) - .catch((error) => { - // Create a new Viz instance (@see Caveats page for more info) - viz = new Viz() - - const element = document.createElement('p') - const text = document.createTextNode('Error while rendering layer') - element.appendChild(text) - document.getElementById('viz-container')!.appendChild(element) - - console.error(error) - }) - } - }, [topology]) - - return
-} - -function App() { - const [layers, setLayers] = useState([]) - const [currentLayer, setCurrentLayer] = useState() - - useEffect(() => { - ;(async () => { - const layersResponse = await fetch('../layers') - const layersList = await layersResponse.json() - - setLayers(layersList) - })() - }, []) - - return ( -
- - {currentLayer && } -
- ) -} - -export default function renderApp() { - ReactDOM.render(, document.getElementById('root')) -} - -renderApp() diff --git a/comms/lighthouse/test/archipelagoService.spec.ts b/comms/lighthouse/test/archipelagoService.spec.ts new file mode 100644 index 000000000..cf3ef8a8b --- /dev/null +++ b/comms/lighthouse/test/archipelagoService.spec.ts @@ -0,0 +1,122 @@ +import { untilTrue } from '@catalyst/commons' +import { Island } from '@dcl/archipelago' +import { ConfigService, LighthouseConfig } from '../src/config/configService' +import { ArchipelagoService } from '../src/peers/archipelagoService' +import { PeersService } from '../src/peers/peersService' +import { PeerOutgoingMessageType } from '../src/peers/protocol/messageTypes' +import { AppServices } from '../src/types' + +describe('Archipelago service', () => { + function mockedArchipelagoParams( + onIslandChanged: (peerChangingId: string, island: Island, fromIsland: Island | undefined) => any = () => {}, + onUpdateSentToIsland: ( + peerId: string, + island: Island, + type: PeerOutgoingMessageType.PEER_JOINED_ISLAND | PeerOutgoingMessageType.PEER_LEFT_ISLAND + ) => any = () => {} + ): Pick { + return { + configService: { + listenTo(_config: LighthouseConfig, _listener: (newValue: T) => void): void { + // Nothing to do + }, + + get(config: LighthouseConfig): T { + if (config.name === LighthouseConfig.ARCHIPELAGO_FLUSH_FREQUENCY.name) return 0.05 as any + return config.defaultValue + } + } as ConfigService, + peersService: () => + ({ + sendUpdateToIsland( + peerId: string, + island: Island, + type: PeerOutgoingMessageType.PEER_JOINED_ISLAND | PeerOutgoingMessageType.PEER_LEFT_ISLAND + ) { + onUpdateSentToIsland(peerId, island, type) + }, + notifyIslandChange(peerChangingId: string, island: Island, fromIsland: Island | undefined) { + onIslandChanged(peerChangingId, island, fromIsland) + } + } as PeersService) + } + } + + function setPeersPositions(service: ArchipelagoService, ...positions: [string, number, number, number][]) { + for (const [id, ...pos] of positions) { + service.updatePeerPosition(id, { position: pos }) + } + } + + it('should respond if two peers are on the same island', async () => { + const processedPeerUpdates: string[] = [] + + const service = new ArchipelagoService( + mockedArchipelagoParams((peerChangingId) => processedPeerUpdates.push(peerChangingId)) + ) + + setPeersPositions(service, ['peer1', 0, 0, 0], ['peer2', 0, 0, 0], ['peer3', 1000, 1000, 1000]) + + await untilTrue( + () => processedPeerUpdates.length === 3, + "All peers should have received island updates but they didn't. Received peer updates: " + + processedPeerUpdates.join(', ') + ) + + expect(await service.areInSameIsland('peer1', 'peer2')).toBe(true) + expect(await service.areInSameIsland('peer1', 'peer3')).toBe(false) + expect(await service.areInSameIsland('peer2', 'peer3')).toBe(false) + expect(await service.areInSameIsland('peer1', 'peer2', 'peer3')).toBe(false) + }) + + it('should notify island of updates', async () => { + const notifiedIslandChanges: { peerId: string; island: Island; fromIsland: Island | undefined }[] = [] + const updatesSentDirectly: { peerId: string; island: Island; type: PeerOutgoingMessageType }[] = [] + + function findForPeer(id: string, items: T[]): T { + return items.find((it) => it.peerId === id)! 
+ } + + const service = new ArchipelagoService( + mockedArchipelagoParams( + (peerId, island, fromIsland) => notifiedIslandChanges.push({ peerId, island, fromIsland }), + (peerId, island, type) => updatesSentDirectly.push({ peerId, island, type }) + ) + ) + + setPeersPositions(service, ['peer1', 0, 0, 0], ['peer2', 0, 0, 0], ['peer3', 1000, 1000, 1000]) + + await untilTrue( + () => notifiedIslandChanges.length === 3, + "Should have received islands updates but didn't. Updates received: " + JSON.stringify(notifiedIslandChanges) + ) + + expect(notifiedIslandChanges.length).toEqual(3) + const peer1Island = findForPeer('peer1', notifiedIslandChanges).island.id + expect(peer1Island).toEqual(findForPeer('peer2', notifiedIslandChanges).island.id) + expect(peer1Island).not.toEqual(findForPeer('peer3', notifiedIslandChanges).island.id) + + notifiedIslandChanges.length = 0 + setPeersPositions(service, ['peer3', 10, 0, 10]) + + await untilTrue( + () => notifiedIslandChanges.length === 1, + "Should have received an island update for peer3 but didn't. Received updates: " + + JSON.stringify(notifiedIslandChanges) + ) + + expect(findForPeer('peer3', notifiedIslandChanges).island.id).toEqual(peer1Island) + + service.clearPeer('peer2') + + await untilTrue( + () => updatesSentDirectly.length === 1, + "Should have received a direct update but didn't. Updates received: " + JSON.stringify(updatesSentDirectly) + ) + + const update = findForPeer('peer2', updatesSentDirectly) + + expect(update.type).toEqual(PeerOutgoingMessageType.PEER_LEFT_ISLAND) + expect(update.island.id).toEqual(peer1Island) + }) +}) diff --git a/comms/lighthouse/test/configService.spec.ts b/comms/lighthouse/test/configService.spec.ts new file mode 100644 index 000000000..884abf8bf --- /dev/null +++ b/comms/lighthouse/test/configService.spec.ts @@ -0,0 +1,271 @@ +import { Config, ConfigService, EnvironmentWrapper, LighthouseConfig } from '../src/config/configService' +import { ISimpleStorage } from '../src/config/simpleStorage' + +describe('Config service', () => { + it('When the config service is built, then environment takes precedence over all', async () => { + const config = LighthouseConfig.MAX_PEERS_PER_ISLAND + + const service = await buildServiceWith({ + env: envWrapperWith(config, '5'), + storage: storageWith(config, 10), + global: globalConfigWith(config, 15) + }) + const value = service.get(config) + + expect(value).toEqual(5) + }) + + it('When the config service is built, then storage takes precedence over global and defaults', async () => { + const config = LighthouseConfig.MAX_PEERS_PER_ISLAND + + const service = await buildServiceWith({ + env: emptyWrapper(), + storage: storageWith(config, 10), + global: globalConfigWith(config, 15) + }) + const value = service.get(config) + + expect(value).toEqual(10) + }) + + it('When the config service is built, then global takes precedence over defaults', async () => { + const config = LighthouseConfig.MAX_PEERS_PER_ISLAND + + const service = await buildServiceWith({ + env: emptyWrapper(), + storage: emptyStorage(), + global: globalConfigWith(config, 15) + }) + const value = service.get(config) + + expect(value).toEqual(15) + }) + + it('When the config service is built and no values were set, then defaults are used', async () => { + const config = LighthouseConfig.MAX_PEERS_PER_ISLAND + + const service = await buildServiceWith({ + env: emptyWrapper(), + storage: emptyStorage(), + global: emptyGlobalConfig() + }) + const value = service.get(config) + + 
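// With no environment, storage, or global value set, the hard-coded default must be returned. +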
expect(value).toEqual(config.defaultValue) + }) + + it('When global config is updated, then config service reports it correctly', async () => { + const config = LighthouseConfig.MAX_PEERS_PER_ISLAND + const globalConfig = globalConfigWith(config, 20) + + const service = await buildServiceWith({ + env: emptyWrapper(), + storage: emptyStorage(), + global: globalConfig + }) + + // Update global config + globalConfig.setConfig(config, 30) + await service.updateConfig() + const value = service.get(config) + + expect(value).toEqual(30) + }) + + it('When storage config is updated, then config service reports it correctly', async () => { + const config = LighthouseConfig.MAX_PEERS_PER_ISLAND + const storageConfig = storageWith(config, 20) + + const service = await buildServiceWith({ + env: emptyWrapper(), + storage: storageConfig, + global: emptyGlobalConfig() + }) + + // Update storage config + storageConfig.setConfig(config, 30) + await service.updateConfig() + const value = service.get(config) + + expect(value).toEqual(30) + }) + + it('When a change happens in global config, then the appropriate listener is called correctly', async () => { + const config = LighthouseConfig.MAX_PEERS_PER_ISLAND + const globalConfig = globalConfigWith(config, 20) + let listenedMaxPeers: number | undefined = undefined + let listenedJoinDistance: number | undefined = undefined + + const service = await buildServiceWith({ + env: emptyWrapper(), + storage: emptyStorage(), + global: globalConfig + }) + + // Add listeners + service.listenTo(config, (newValue) => (listenedMaxPeers = newValue)) + service.listenTo(LighthouseConfig.ARCHIPELAGO_JOIN_DISTANCE, (newValue) => (listenedJoinDistance = newValue)) + + // Update global config + globalConfig.setConfig(config, 30) + await service.updateConfig() + + expect(listenedMaxPeers!).toEqual(30) + expect(listenedJoinDistance).toBeUndefined() + }) + + it('When a change happens in storage config, then the appropriate listener is called correctly', async () => { + const config = LighthouseConfig.MAX_PEERS_PER_ISLAND + const storageConfig = storageWith(config, 20) + let listenedMaxPeers: number | undefined = undefined + let listenedJoinDistance: number | undefined = undefined + + const service = await buildServiceWith({ + env: emptyWrapper(), + storage: storageConfig, + global: emptyGlobalConfig() + }) + + // Add listeners + service.listenTo(config, (newValue) => (listenedMaxPeers = newValue)) + service.listenTo(LighthouseConfig.ARCHIPELAGO_JOIN_DISTANCE, (newValue) => (listenedJoinDistance = newValue)) + + // Update storage config + storageConfig.setConfig(config, 30) + await service.updateConfig() + + expect(listenedMaxPeers!).toEqual(30) + expect(listenedJoinDistance).toBeUndefined() + }) + + it('When storage config is deleted, then config service reports it correctly', async () => { + const config = LighthouseConfig.MAX_PEERS_PER_ISLAND + const storageConfig = storageWith(config, 20) + + const service = await buildServiceWith({ + env: emptyWrapper(), + storage: storageConfig, + global: emptyGlobalConfig() + }) + + // Delete storage config + await service.updateStorageConfigs([{ key: config.name }]) + const value = service.get(config) + + expect(value).toEqual(config.defaultValue) + }) + + it('When global config is deleted, then config service reports it correctly', async () => { + const config = LighthouseConfig.MAX_PEERS_PER_ISLAND + const globalConfig = globalConfigWith(config, 20) + + const service = await buildServiceWith({ + env: emptyWrapper(), + storage: emptyStorage(), 
+ global: globalConfig + }) + + // Delete global config + globalConfig.deleteConfig(config) + await service.updateConfig() + const value = service.get(config) + + expect(value).toEqual(config.defaultValue) + }) +}) + +function buildServiceWith({ + env, + global, + storage +}: { + env: EnvironmentWrapper + global: CustomGlobalConfig + storage: CustomStorage +}): Promise { + return ConfigService.build({ + storage, + globalConfig: { fetch: () => global.getAllConfig() }, + envWrapper: env + }) +} + +function globalConfigWith(config: LighthouseConfig, value: T): CustomGlobalConfig { + const globalConfig = emptyGlobalConfig() + globalConfig.setConfig(config, value) + return globalConfig +} + +function emptyGlobalConfig(): CustomGlobalConfig { + return new CustomGlobalConfig() +} + +function emptyWrapper(): EnvironmentWrapper { + return { + isInEnv: (_) => false, + readFromEnv: (_) => { + throw new Error('Should never get here') + } + } +} + +function envWrapperWith(config: LighthouseConfig, value: string): EnvironmentWrapper { + return { + isInEnv: (key) => key === LighthouseConfig.toEnvironmentName(config.name), + readFromEnv: (_) => value + } +} + +function emptyStorage(): CustomStorage { + return new CustomStorage() +} + +function storageWith(config: LighthouseConfig, value: T): CustomStorage { + const storage = emptyStorage() + storage.setConfig(config, value) + return storage +} + +class CustomStorage implements ISimpleStorage { + private readonly values: Config = {} + setConfig(config: LighthouseConfig, value: T) { + this.values[config.name] = value + } + + getAll(): Promise { + return Promise.resolve(this.values) + } + setString(key: string, value: string): Promise { + this.values[key] = value + return Promise.resolve() + } + deleteKey(key: string): Promise { + delete this.values[key] + return Promise.resolve() + } + getString(key: string): Promise { + throw new Error('Method not implemented.') + } + getOrSetString(key: string, value: string): Promise { + throw new Error('Method not implemented.') + } + clear(): Promise { + throw new Error('Method not implemented.') + } +} + +class CustomGlobalConfig { + private readonly values: Config = {} + + setConfig(config: LighthouseConfig, value: T) { + this.values[config.name] = value + } + + deleteConfig(config: LighthouseConfig) { + delete this.values[config.name] + } + + getAllConfig(): Promise { + return Promise.resolve(this.values) + } +} diff --git a/comms/lighthouse/test/handlers.spec.ts b/comms/lighthouse/test/handlers.spec.ts index 36bee4599..790cfae18 100644 --- a/comms/lighthouse/test/handlers.spec.ts +++ b/comms/lighthouse/test/handlers.spec.ts @@ -1,4 +1,4 @@ -import { requireAll, requireOneOf, validatePeerToken } from '../src/handlers' +import { requireAll, requireOneOf, validatePeerToken } from '../src/misc/handlers' describe('require parameters', () => { let request: any diff --git a/comms/lighthouse/test/helpers/reporter.ts b/comms/lighthouse/test/helpers/reporter.ts new file mode 100644 index 000000000..a740c3478 --- /dev/null +++ b/comms/lighthouse/test/helpers/reporter.ts @@ -0,0 +1,3 @@ +import { installReporter } from '@catalyst/commons' + +installReporter() diff --git a/comms/lighthouse/test/idService.spec.ts b/comms/lighthouse/test/idService.spec.ts index 56bd44dee..6ac6f68e6 100644 --- a/comms/lighthouse/test/idService.spec.ts +++ b/comms/lighthouse/test/idService.spec.ts @@ -1,5 +1,5 @@ import express from 'express' -import { IdService } from '../src/idService' +import { IdService } from '../src/peers/idService' 
require('isomorphic-fetch') @@ -11,7 +11,7 @@ describe('id service generation', function () { beforeEach(() => { originalTimeout = jasmine.DEFAULT_TIMEOUT_INTERVAL jasmine.DEFAULT_TIMEOUT_INTERVAL = 10000 - idService = new IdService() + idService = new IdService({ idLength: 2 }) }) afterEach(function () { diff --git a/comms/lighthouse/test/index.spec.ts b/comms/lighthouse/test/index.spec.ts deleted file mode 100644 index 5885526d7..000000000 --- a/comms/lighthouse/test/index.spec.ts +++ /dev/null @@ -1,5 +0,0 @@ -describe('tests in jasmine running on karma', function () { - it(`works correctly`, () => { - expect(true).toBe(true) - }) -}) diff --git a/comms/lighthouse/test/naming.spec.ts b/comms/lighthouse/test/naming.spec.ts index 9f208010d..411c4cdf9 100644 --- a/comms/lighthouse/test/naming.spec.ts +++ b/comms/lighthouse/test/naming.spec.ts @@ -1,22 +1,19 @@ -/* eslint-disable @typescript-eslint/ban-ts-comment */ // Because of Bazel sandboxing, we need this for the time being process.env.LIGHTHOUSE_STORAGE_LOCATION = '.' -import { DAOClient } from 'decentraland-katalyst-commons/DAOClient' -import { ServerMetadata } from 'decentraland-katalyst-commons/ServerMetadata' -import { defaultNames, pickName } from '../src/naming' -import { lighthouseStorage } from '../src/simpleStorage' +import { DAOClient, ServerMetadata } from '@catalyst/commons' +import { lighthouseStorage } from '../src/config/simpleStorage' +import { defaultNames, pickName } from '../src/misc/naming' declare let global: any const oldFetch = global.fetch -// @ts-ignore const daoClient: DAOClient = { async getAllServers(): Promise> { - return new Set([{ id: 'id', address: '0x...', owner: '0x...' }]) + return new Set([{ id: 'id', baseUrl: '0x...', owner: '0x...' }]) } -} +} as DAOClient let existingName = 'fenrir' diff --git a/comms/lighthouse/test/peerjs-server/messageHandler/handlers/heartbeat/index.spec.ts b/comms/lighthouse/test/peerjs-server/messageHandler/handlers/heartbeat/index.spec.ts new file mode 100644 index 000000000..0eece48d4 --- /dev/null +++ b/comms/lighthouse/test/peerjs-server/messageHandler/handlers/heartbeat/index.spec.ts @@ -0,0 +1,15 @@ +import { HeartbeatHandler } from '../../../../../src/peerjs-server/messageHandler/handlers' +import { createClient } from '../../../utils' + +describe('Heartbeat handler', () => { + it('should update last ping time', () => { + const client = createClient({}) + client.setLastPing(0) + + const nowTime = new Date().getTime() + + HeartbeatHandler(client).catch(console.error) + + expect(client.getLastPing() - nowTime).toBeLessThanOrEqual(1000) + }) +}) diff --git a/comms/lighthouse/test/peerjs-server/messageHandler/handlers/transmission/index.spec.ts b/comms/lighthouse/test/peerjs-server/messageHandler/handlers/transmission/index.spec.ts new file mode 100644 index 000000000..32a90efdd --- /dev/null +++ b/comms/lighthouse/test/peerjs-server/messageHandler/handlers/transmission/index.spec.ts @@ -0,0 +1,130 @@ +import { MessageType } from '../../../../../src/peerjs-server/enums' +import { TransmissionHandler } from '../../../../../src/peerjs-server/messageHandler/handlers' +import { Realm } from '../../../../../src/peerjs-server/models/realm' +import { MyWebSocket } from '../../../../../src/peerjs-server/services/webSocketServer/webSocket' +import { createClient } from '../../../utils' + +const createFakeSocket = (): MyWebSocket => { + /* eslint-disable @typescript-eslint/no-empty-function */ + const sock = { + send: (): void => {}, + close: (): void => {}, + on: (): void => {} + 
} + /* eslint-enable @typescript-eslint/no-empty-function */ + return sock as any +} + +describe('Transmission handler', () => { + it('should save message in queue when destination client not connected', () => { + const realm = new Realm() + const handleTransmission = TransmissionHandler({ realm }) + + const clientFrom = createClient({ id: 'id1' }) + const idTo = 'id2' + realm.setClient(clientFrom, clientFrom.getId()) + + handleTransmission(clientFrom, { type: MessageType.OFFER, src: clientFrom.getId(), dst: idTo }).catch(console.error) + + expect(realm.getMessageQueueById(idTo)?.getMessages().length).toEqual(1) + }) + + it('should not save LEAVE and EXPIRE messages in queue when destination client not connected', () => { + const realm = new Realm() + const handleTransmission = TransmissionHandler({ realm }) + + const clientFrom = createClient({ id: 'id1' }) + const idTo = 'id2' + realm.setClient(clientFrom, clientFrom.getId()) + + handleTransmission(clientFrom, { type: MessageType.LEAVE, src: clientFrom.getId(), dst: idTo }).catch(console.error) + handleTransmission(clientFrom, { type: MessageType.EXPIRE, src: clientFrom.getId(), dst: idTo }).catch( + console.error + ) + + expect(realm.getMessageQueueById(idTo)).toBeUndefined() + }) + + it('should send message to destination client when destination client connected', () => { + const realm = new Realm() + const handleTransmission = TransmissionHandler({ realm }) + + const clientFrom = createClient({ id: 'id1' }) + const clientTo = createClient({ id: 'id2' }) + const socketTo = createFakeSocket() + clientTo.setSocket(socketTo) + realm.setClient(clientTo, clientTo.getId()) + + let sent = false + socketTo.send = (): void => { + sent = true + } + + handleTransmission(clientFrom, { type: MessageType.OFFER, src: clientFrom.getId(), dst: clientTo.getId() }).catch( + console.error + ) + + expect(sent).toBe(true) + }) + + it('should send LEAVE message to source client when sending to destination client failed', () => { + const realm = new Realm() + const handleTransmission = TransmissionHandler({ realm }) + + const clientFrom = createClient({ id: 'id1' }) + const clientTo = createClient({ id: 'id2' }) + const socketFrom = createFakeSocket() + const socketTo = createFakeSocket() + clientFrom.setSocket(socketFrom) + clientTo.setSocket(socketTo) + realm.setClient(clientFrom, clientFrom.getId()) + realm.setClient(clientTo, clientTo.getId()) + + let sent = false + socketFrom.send = (data: string): void => { + if (JSON.parse(data)?.type === MessageType.LEAVE) { + sent = true + } + } + + socketTo.send = (): void => { + throw Error() + } + + handleTransmission(clientFrom, { type: MessageType.OFFER, src: clientFrom.getId(), dst: clientTo.getId() }).catch( + console.error + ) + + expect(sent).toBe(true) + }) + + it('should filter a transmission message when a filter is provided', async () => { + const realm = new Realm() + const filter = async (src: string, dst: string) => src == 'id1' && dst == 'id2' + + const handleTransmission = TransmissionHandler({ realm, transmissionFilter: filter }) + + const clientFrom = createClient({ id: 'id1' }) + const clientTo = createClient({ id: 'id2' }) + const socketTo = createFakeSocket() + clientTo.setSocket(socketTo) + realm.setClient(clientFrom, clientFrom.getId()) + realm.setClient(clientTo, clientTo.getId()) + + let sent = false + socketTo.send = async (data: string) => { + const { src, dst } = JSON.parse(data) + if (await filter(src, dst)) { + sent = true + } else { + throw Error('This message should have been 
filtered: ' + data) + } + } + + await handleTransmission(clientFrom, { type: MessageType.OFFER, src: clientFrom.getId(), dst: clientTo.getId() }) + + expect(sent).toBe(true) + + await handleTransmission(clientFrom, { type: MessageType.OFFER, src: clientFrom.getId(), dst: 'asd' }) + }) +}) diff --git a/comms/lighthouse/test/peerjs-server/messageHandler/handlersRegistry.ts b/comms/lighthouse/test/peerjs-server/messageHandler/handlersRegistry.ts new file mode 100644 index 000000000..aa9c8b8ec --- /dev/null +++ b/comms/lighthouse/test/peerjs-server/messageHandler/handlersRegistry.ts @@ -0,0 +1,22 @@ +import { MessageType } from '../../../src/peerjs-server/enums' +import { Handler } from '../../../src/peerjs-server/messageHandler/handler' +import { HandlersRegistry } from '../../../src/peerjs-server/messageHandler/handlersRegistry' + +describe('HandlersRegistry', () => { + it('should execute handler for message type', () => { + const handlersRegistry = new HandlersRegistry() + + let handled = false + + const handler: Handler = async (): Promise => { + handled = true + return true + } + + handlersRegistry.registerHandler(MessageType.OPEN, handler) + + handlersRegistry.handle(undefined, { type: MessageType.OPEN, src: 'src', dst: 'dst' }).catch(console.error) + + expect(handled).toBeTrue() + }) +}) diff --git a/comms/lighthouse/test/peerjs-server/models/messageQueue.spec.ts b/comms/lighthouse/test/peerjs-server/models/messageQueue.spec.ts new file mode 100644 index 000000000..dc8879d82 --- /dev/null +++ b/comms/lighthouse/test/peerjs-server/models/messageQueue.spec.ts @@ -0,0 +1,63 @@ +import { MessageType } from '../../../src/peerjs-server/enums' +import { IMessage } from '../../../src/peerjs-server/models/message' +import { MessageQueue } from '../../../src/peerjs-server/models/messageQueue' +import { wait } from '../utils' + +describe('MessageQueue', () => { + const createTestMessage = (): IMessage => { + return { + type: MessageType.OPEN, + src: 'src', + dst: 'dst' + } + } + + describe('#addMessage', () => { + it('should add message to queue', () => { + const queue = new MessageQueue() + queue.addMessage(createTestMessage()) + expect(queue.getMessages().length).toEqual(1) + }) + }) + + describe('#readMessage', () => { + it('should return undefined for empty queue', () => { + const queue = new MessageQueue() + expect(queue.readMessage()).toBeUndefined() + }) + + it('should return message if any exists in queue', () => { + const queue = new MessageQueue() + const message = createTestMessage() + queue.addMessage(message) + + expect(queue.readMessage()).toEqual(message) + expect(queue.readMessage()).toBeUndefined() + }) + }) + + describe('#getLastReadAt', () => { + it('should not be changed if no messages when read', () => { + const queue = new MessageQueue() + const lastReadAt = queue.getLastReadAt() + queue.readMessage() + expect(queue.getLastReadAt()).toEqual(lastReadAt) + }) + + it('should be changed when read message', async () => { + const queue = new MessageQueue() + const lastReadAt = queue.getLastReadAt() + queue.addMessage(createTestMessage()) + + await wait(15) + + expect(queue.getLastReadAt()).toEqual(lastReadAt) + + queue.readMessage() + + // setTimeout is not as precise as one would like, so we cannot test exact milliseconds here. 
+ // We assume it should be greater than at least the previous + 5 + expect(queue.getLastReadAt()).toBeGreaterThanOrEqual(lastReadAt + 5) + }) + }) +}) diff --git a/comms/lighthouse/test/peerjs-server/models/realm.spec.ts b/comms/lighthouse/test/peerjs-server/models/realm.spec.ts new file mode 100644 index 000000000..a1c38e541 --- /dev/null +++ b/comms/lighthouse/test/peerjs-server/models/realm.spec.ts @@ -0,0 +1,48 @@ +import { Realm } from '../../../src/peerjs-server/models/realm' +import { createClient } from '../utils' + +describe('Realm', () => { + describe('#generateClientId', () => { + it('should generate a 36-character UUID', () => { + const realm = new Realm() + expect(realm.generateClientId().length).toEqual(36) + }) + }) + + describe('#setClient', () => { + it('should add client to realm', () => { + const realm = new Realm() + const client = createClient() + + realm.setClient(client, 'id') + expect(realm.getClientsIds()).toEqual(['id']) + }) + }) + + describe('#removeClientById', () => { + it('should remove client from realm', () => { + const realm = new Realm() + const client = createClient() + + realm.setClient(client, 'id') + realm.removeClientById('id') + + expect(realm.getClientById('id')).toBeUndefined() + }) + }) + + describe('#getClientsIds', () => { + it('should reflects on add/remove childs', () => { + const realm = new Realm() + const client = createClient() + + realm.setClient(client, 'id') + expect(realm.getClientsIds()).toEqual(['id']) + + expect(realm.getClientById('id')).toEqual(client) + + realm.removeClientById('id') + expect(realm.getClientsIds()).toEqual([]) + }) + }) +}) diff --git a/comms/lighthouse/test/peerjs-server/services/checkBrokenConnections/index.spec.ts b/comms/lighthouse/test/peerjs-server/services/checkBrokenConnections/index.spec.ts new file mode 100644 index 000000000..92799382c --- /dev/null +++ b/comms/lighthouse/test/peerjs-server/services/checkBrokenConnections/index.spec.ts @@ -0,0 +1,50 @@ +import { Realm } from '../../../../src/peerjs-server/models/realm' +import { CheckBrokenConnections } from '../../../../src/peerjs-server/services/checkBrokenConnections' +import { createClient, wait } from '../../utils' + +describe('CheckBrokenConnections', () => { + it('should remove client after 2 checks', async () => { + const realm = new Realm() + const doubleCheckTime = 55 //~ equals to checkBrokenConnections.checkInterval * 2 + const checkBrokenConnections = new CheckBrokenConnections({ + realm, + config: { alive_timeout: doubleCheckTime }, + checkInterval: 30 + }) + const client = createClient() + realm.setClient(client, 'id') + + checkBrokenConnections.start() + + await wait(checkBrokenConnections.checkInterval * 2 + 30) + + expect(realm.getClientById('id')).toBeUndefined() + + checkBrokenConnections.stop() + }) + + it('should remove client after 1 ping', async () => { + const realm = new Realm() + const doubleCheckTime = 55 //~ equals to checkBrokenConnections.checkInterval * 2 + const checkBrokenConnections = new CheckBrokenConnections({ + realm, + config: { alive_timeout: doubleCheckTime }, + checkInterval: 30 + }) + const client = createClient() + realm.setClient(client, 'id') + + checkBrokenConnections.start() + + //set ping after first check + await wait(checkBrokenConnections.checkInterval) + + client.setLastPing(new Date().getTime()) + + await wait(checkBrokenConnections.checkInterval * 2 + 10) + + expect(realm.getClientById('id')).toBeUndefined() + + checkBrokenConnections.stop() + }) +}) diff --git 
a/comms/lighthouse/test/peerjs-server/services/messagesExpire/index.spec.ts b/comms/lighthouse/test/peerjs-server/services/messagesExpire/index.spec.ts new file mode 100644 index 000000000..b92e76c66 --- /dev/null +++ b/comms/lighthouse/test/peerjs-server/services/messagesExpire/index.spec.ts @@ -0,0 +1,77 @@ +import defaultConfig from '../../../../src/peerjs-server/config' +import { MessageType } from '../../../../src/peerjs-server/enums' +import { MessageHandler } from '../../../../src/peerjs-server/messageHandler' +import { IMessage } from '../../../../src/peerjs-server/models/message' +import { Realm } from '../../../../src/peerjs-server/models/realm' +import { MessagesExpire } from '../../../../src/peerjs-server/services/messagesExpire' +import { createClient, wait } from '../../utils' + +describe('MessagesExpire', () => { + const createTestMessage = (): IMessage => { + return { + type: MessageType.OPEN, + src: 'src', + dst: 'dst' + } + } + + it('should remove client if no read from queue', async () => { + const realm = new Realm() + const messageHandler = new MessageHandler(realm, defaultConfig) + const checkInterval = 10 + const expireTimeout = 50 + const config = { cleanup_out_msgs: checkInterval, expire_timeout: expireTimeout } + + const messagesExpire = new MessagesExpire({ realm, config, messageHandler }) + + const client = createClient() + realm.setClient(client, 'id') + realm.addMessageToQueue(client.getId(), createTestMessage()) + + messagesExpire.startMessagesExpiration() + + await wait(checkInterval * 2) + + expect(realm.getMessageQueueById(client.getId())?.getMessages().length).toEqual(1) + + await wait(expireTimeout) + + expect(realm.getMessageQueueById(client.getId())).toBeUndefined() + + messagesExpire.stopMessagesExpiration() + }) + + it('should fire EXPIRE message', async () => { + const realm = new Realm() + const messageHandler = new MessageHandler(realm, defaultConfig) + const checkInterval = 10 + const expireTimeout = 50 + const config = { cleanup_out_msgs: checkInterval, expire_timeout: expireTimeout } + + const messagesExpire = new MessagesExpire({ realm, config, messageHandler }) + + const client = createClient() + realm.setClient(client, 'id') + realm.addMessageToQueue(client.getId(), createTestMessage()) + + let handled = false + + messageHandler.handle = async (client, message): Promise => { + expect(client).toBeUndefined + expect(message.type).toEqual(MessageType.EXPIRE) + + handled = true + + return true + } + + messagesExpire.startMessagesExpiration() + + await wait(checkInterval * 2) + await wait(expireTimeout) + + expect(handled).toBe(true) + + messagesExpire.stopMessagesExpiration() + }) +}) diff --git a/comms/lighthouse/test/peerjs-server/services/webSocketServer/index.spec.ts b/comms/lighthouse/test/peerjs-server/services/webSocketServer/index.spec.ts new file mode 100644 index 000000000..4e03634b8 --- /dev/null +++ b/comms/lighthouse/test/peerjs-server/services/webSocketServer/index.spec.ts @@ -0,0 +1,248 @@ +import { Server, WebSocket } from 'mock-socket' +import { Errors, MessageType } from '../../../../src/peerjs-server/enums' +import { Realm } from '../../../../src/peerjs-server/models/realm' +import { WebSocketServer } from '../../../../src/peerjs-server/services/webSocketServer' +import { numericIdGenerator } from '../../../../src/peerjs-server/utils/idgenerator' +import { wait } from '../../utils' + +type Destroyable = T & { destroy?: () => Promise } + +const checkOpen = async (c: WebSocket): Promise => { + return new Promise((resolve) => { + 
c.onmessage = (event: any & { data?: string }): void => { + c.onmessage = () => {} + const message = JSON.parse(event.data as string) + resolve(message.type === MessageType.OPEN) + } + }) +} + +const checkSequence = async ( + c: WebSocket, + msgs: { type: MessageType; error?: Errors; payloadCheck?: (payload: any) => boolean }[] +): Promise => { + return new Promise((resolve) => { + const restMessages = [...msgs] + + const finish = (success = false): void => { + c.onmessage = () => {} + resolve(success) + } + + c.onmessage = (event: any & { data?: string }): void => { + const [mes] = restMessages + + if (!mes) { + return finish() + } + + restMessages.shift() + + const message = JSON.parse(event.data as string) + if (message.type !== mes.type) { + return finish() + } + + const isOk = !mes.error || message.payload?.msg === mes.error + + if (!isOk) { + return finish() + } + + const payloadOk = !mes.payloadCheck || mes.payloadCheck(message.payload) + + if (!payloadOk) { + return finish() + } + + if (restMessages.length === 0) { + finish(true) + } + } + }) +} + +const createTestServer = ({ + realm, + config, + url +}: { + realm: Realm + config: { path: string; key: string; concurrent_limit: number; idGenerator: () => string } + url: string +}): Destroyable => { + const server = new Server(url) + const webSocketServer: Destroyable = new WebSocketServer({ + server, + realm, + config: { ...config, maxIdIterations: 100000 } + }) + + server.on('connection', (socket: WebSocket & { on?: (eventName: string, callback: () => void) => void }) => { + const s = webSocketServer.socketServer + s.emit('connection', socket, { url: socket.url }) + + socket.onclose = (): void => { + const userId = socket.url + .split('?')[1] + ?.split('&') + .find((p) => p.startsWith('id')) + ?.split('=')[1] + + if (!userId) return + + const client = realm.getClientById(userId) + + const clientSocket = client?.getSocket() + + if (!clientSocket) return + ;(clientSocket as unknown as WebSocket).listeners['server::close']?.forEach((s: () => void) => s()) + } + + socket.onmessage = (event: any & { data?: string }): void => { + const userId = socket.url + .split('?')[1] + ?.split('&') + .find((p) => p.startsWith('id')) + ?.split('=')[1] + + if (!userId) return + + const client = realm.getClientById(userId) + + const clientSocket = client?.getSocket() + + if (!clientSocket) return + ;(clientSocket as unknown as WebSocket).listeners['server::message']?.forEach((s: (data: any) => void) => + s(event) + ) + } + }) + + webSocketServer.destroy = async (): Promise => { + return new Promise((resolve) => { + server.close() + server.stop(() => resolve()) + }) + } + + return webSocketServer +} + +describe('WebSocketServer', () => { + it('should return valid path', () => { + const realm = new Realm() + const config = { + path: '/', + key: 'testKey', + concurrent_limit: 1, + idGenerator: numericIdGenerator(), + maxIdIterations: 100000 + } + const config2 = { ...config, path: 'path' } + const server = new Server('path1') + const server2 = new Server('path2') + + const webSocketServer = new WebSocketServer({ server, realm, config }) + + expect(webSocketServer.path).toEqual('/peerjs') + + const webSocketServer2 = new WebSocketServer({ server: server2, realm, config: config2 }) + + expect(webSocketServer2.path).toEqual('path/peerjs') + + server.stop() + server2.stop() + }) + + it(`should check client's params`, async () => { + const realm = new Realm() + const config = { path: '/', key: 'testKey', concurrent_limit: 1, idGenerator: numericIdGenerator() } 
+ const fakeURL = 'ws://localhost:8080/peerjs' + + const getError = async (url: string, validError: Errors = Errors.INVALID_WS_PARAMETERS): Promise => { + const webSocketServer = createTestServer({ url, realm, config }) + + const ws = new WebSocket(url) + const errorSent = await checkSequence(ws, [{ type: MessageType.ERROR, error: validError }]) + + ws.close() + + await webSocketServer.destroy?.() + + return errorSent + } + + expect(await getError(fakeURL)).toBe(true) + expect(await getError(`${fakeURL}?key=${config.key}`)).toBe(true) + expect(await getError(`${fakeURL}?key=${config.key}&id=1`)).toBe(true) + expect(await getError(`${fakeURL}?key=notValidKey&id=userId&token=userToken`, Errors.INVALID_KEY)).toBe(true) + }) + + it(`should assign a free id when no id is provided`, async () => { + const realm = new Realm() + const config = { path: '/', key: 'testKey', concurrent_limit: 1, idGenerator: numericIdGenerator() } + const url = `ws://localhost:8080/peerjs?key=${config.key}&token=any` + + const webSocketServer = createTestServer({ url, realm, config }) + + const ws = new WebSocket(url) + + const assignedIdReceived = await checkSequence(ws, [ + { type: MessageType.ASSIGNED_ID, payloadCheck: (payload) => payload.id === '1' } + ]) + + ws.close() + + await webSocketServer.destroy?.() + + expect(assignedIdReceived).toBe(true) + }) + + it(`should check concurrent limit`, async () => { + const realm = new Realm() + const config = { path: '/', key: 'testKey', concurrent_limit: 1, idGenerator: numericIdGenerator() } + const fakeURL = 'ws://localhost:8080/peerjs' + + const createClient = (id: string): Destroyable => { + const url = `${fakeURL}?key=${config.key}&id=${id}&token=${id}` + const webSocketServer = createTestServer({ url, realm, config }) + const ws: Destroyable = new WebSocket(url) + + ws.destroy = async (): Promise => { + ws.close() + + await wait(10) + + await webSocketServer.destroy?.() + + await wait(10) + + ws.destroy = undefined + } + + return ws + } + + const c1 = createClient('1') + + expect(await checkOpen(c1)).toBe(true) + + const c2 = createClient('2') + + expect(await checkSequence(c2, [{ type: MessageType.ERROR, error: Errors.CONNECTION_LIMIT_EXCEED }])).toBe(true) + + await c1.destroy?.() + await c2.destroy?.() + + await wait(10) + + expect(realm.getClientsIds().length).toEqual(0) + + const c3 = createClient('3') + + expect(await checkOpen(c3)).toBe(true) + + await c3.destroy?.() + }) +}) diff --git a/comms/lighthouse/test/peerjs-server/utils.ts b/comms/lighthouse/test/peerjs-server/utils.ts new file mode 100644 index 000000000..e8ca1f397 --- /dev/null +++ b/comms/lighthouse/test/peerjs-server/utils.ts @@ -0,0 +1,14 @@ +import { IdType } from '../../src/peerjs-server/enums' +import { Client, IClient } from '../../src/peerjs-server/models/client' + +export const wait = (ms: number): Promise => new Promise((resolve) => setTimeout(resolve, ms)) +export function createClient({ + id = 'id', + token = '', + msg = '', + idType = IdType.SELF_ASSIGNED +}: { id?: string; token?: string; msg?: string; idType?: IdType } = {}): IClient { + const client = new Client({ id, token, idType, msg }) + client.setAuthenticated(true) + return client +} diff --git a/comms/lighthouse/test/roomsService.spec.ts b/comms/lighthouse/test/roomsService.spec.ts deleted file mode 100644 index 96a0e3657..000000000 --- a/comms/lighthouse/test/roomsService.spec.ts +++ /dev/null @@ -1,136 +0,0 @@ -/* eslint-disable @typescript-eslint/ban-types */ -import { PeerConnectionHint } from 
'decentraland-katalyst-utils/Positions' -import { IPeersService, NotificationType } from '../src/peersService' -import { RoomsService } from '../src/roomsService' -import { PeerInfo, PeerRequest } from '../src/types' - -const { arrayWithExactContents } = jasmine - -const layerId = 'blue' - -describe('Rooms service', () => { - let peerService: IPeersService & { sentMessages: [string, any][] } - let roomsService: RoomsService - - function createPeer() { - const id = Math.floor(Math.random() * Number.MAX_SAFE_INTEGER).toString() - return { id: id, protocolVersion: 99 } - } - - beforeEach(() => { - peerService = { - notifyPeersById(peerIds: string[], type: NotificationType, payload: object) { - peerIds.forEach((it) => this.sentMessages.push([it, { type, payload }])) - }, - getPeerInfo(peerId: string) { - return { id: peerId, protocolVersion: 99 } - }, - getPeersInfo(peerIds: string[]) { - return peerIds.map((it) => this.getPeerInfo(it)) - }, - ensurePeerInfo(peer: PeerRequest) { - return { id: peer.peerId!, protocolVersion: 99 } - }, - sentMessages: [], - getOptimalConnectionsFor( - peer: PeerInfo, - otherPeers: PeerInfo[], - targetConnections: number, - maxDistance: number - ): PeerConnectionHint[] { - return [] - } - } - - roomsService = new RoomsService(layerId, {}, { peersService: peerService }) - }) - - it('should allow to add a user to an non-existing room and create it', async () => { - const peerData = createPeer() - await roomsService.addPeerToRoom('room', peerData.id) - expect(roomsService.getPeers('room')).toEqual([peerData]) - }) - - it('should allow to add a user to an existing room', async () => { - const peer1 = createPeer() - const peer2 = createPeer() - - await roomsService.addPeerToRoom('room', peer1.id) - await roomsService.addPeerToRoom('room', peer2.id) - - expect(roomsService.getPeers('room')).toEqual(arrayWithExactContents([peer1, peer2])) - }) - - it('should list all the rooms', async () => { - await roomsService.addPeerToRoom('room1', createPeer().id) - await roomsService.addPeerToRoom('room2', createPeer().id) - - expect(roomsService.getRoomIds()).toEqual(arrayWithExactContents(['room1', 'room2'])) - }) - - it('should list all the rooms that a user is in', async () => { - await roomsService.addPeerToRoom('room1', createPeer().id) - - const aPeer = createPeer() - await roomsService.addPeerToRoom('room2', aPeer.id) - await roomsService.addPeerToRoom('room3', aPeer.id) - - expect(roomsService.getRoomIds({ peerId: aPeer.id })).toEqual(arrayWithExactContents(['room2', 'room3'])) - }) - - it('should allow removing a user from a room', async () => { - const peer1 = createPeer() - await roomsService.addPeerToRoom('room', peer1.id) - - const peer2 = createPeer() - await roomsService.addPeerToRoom('room', peer2.id) - - roomsService.removePeerFromRoom('room', peer2.id) - - expect(roomsService.getPeers('room')).toEqual([peer1]) - }) - - it('should delete a room if all users are removed', async () => { - const peer1 = createPeer() - await roomsService.addPeerToRoom('room', peer1.id) - - roomsService.removePeerFromRoom('room', peer1.id) - - expect(roomsService.getRoomIds()).toEqual([]) - }) - - it('should allow removing a user from all rooms', async () => { - const peer1 = createPeer() - const peer2 = createPeer() - - await roomsService.addPeerToRoom('room1', peer1.id) - await roomsService.addPeerToRoom('room2', peer1.id) - await roomsService.addPeerToRoom('room1', peer2.id) - await roomsService.addPeerToRoom('room2', peer2.id) - - roomsService.removePeer(peer1.id) - - 
expect(roomsService.getPeers('room1')).toEqual([peer2]) - expect(roomsService.getPeers('room2')).toEqual([peer2]) - }) - - it('should notify when a user is removed from a room', async () => { - const peer1 = createPeer() - const peer2 = createPeer() - - await roomsService.addPeerToRoom('room1', peer1.id) - await roomsService.addPeerToRoom('room1', peer2.id) - - roomsService.removePeerFromRoom('room1', peer1.id) - - const leftMessages = peerService.sentMessages.filter(([id, message]) => message.type === 'PEER_LEFT_ROOM') - - expect(leftMessages.length).toEqual(1) - - const [[id, message]] = leftMessages - - expect(id).toEqual(peer2.id) - expect(message.payload.id).toEqual(peer1.id) - expect(message.payload.roomId).toEqual('room1') - }) -}) diff --git a/comms/lighthouse/tsconfig.json b/comms/lighthouse/tsconfig.json new file mode 100644 index 000000000..33a4c003c --- /dev/null +++ b/comms/lighthouse/tsconfig.json @@ -0,0 +1,9 @@ +{ + "extends": "../../tsconfig.json", + "compilerOptions": { + "outDir": "dist", + "rootDir": ".", + "tsBuildInfoFile": "./dist/.tsbuildinfo" + }, + "include": ["src", "test"] +} diff --git a/comms/peer-react-app/App.tsx b/comms/peer-react-app/App.tsx deleted file mode 100644 index ce9db5162..000000000 --- a/comms/peer-react-app/App.tsx +++ /dev/null @@ -1,63 +0,0 @@ -/* eslint-disable @typescript-eslint/ban-ts-comment */ -// currently third part packages frmo npm are not available due to issue: -import React from 'react' -import ReactDOM from 'react-dom' -import { Center } from 'decentraland-ui' -import { ConnectForm } from './components/ConnectForm' -import { Peer } from '../peer/src/Peer' -import { IPeer } from '../peer/src/types' -import { Chat } from './components/Chat' - -type ScreenEnum = 'connect' | 'chat' - -class App extends React.Component< - unknown, - { screen: ScreenEnum; peer?: IPeer; room?: string; url?: string; layer?: string } -> { - constructor(props: unknown) { - super(props) - this.state = { screen: 'connect' } - } - - currentScreen(): React.ReactElement { - switch (this.state.screen) { - case 'connect': - return this.connectForm() - case 'chat': - if (this.state.peer && this.state.room) { - return - } else { - return this.connectForm() - } - } - } - - // @ts-ignore - private connectForm(): React.ReactElement< - any, - | string - | ((props: any) => React.ReactElement React.Component)>) - | (new (props: any) => React.Component) - > { - return ( - { - this.setState({ screen: 'chat', peer, layer, room, url }) - }} - peerClass={Peer} - /> - ) - } - - render() { - return ( -
-      <Center>
-        {this.currentScreen()}
-      </Center>
- ) - } -} - -export default function renderApp() { - ReactDOM.render(, document.getElementById('root')) -} diff --git a/comms/peer-react-app/BUILD.bazel b/comms/peer-react-app/BUILD.bazel deleted file mode 100644 index 6fde86f2c..000000000 --- a/comms/peer-react-app/BUILD.bazel +++ /dev/null @@ -1,58 +0,0 @@ -load("@npm_bazel_typescript//:index.bzl", "ts_devserver", "ts_library") - -package(default_visibility = ["//visibility:public"]) - -filegroup( - name = "static", - srcs = glob( - include = [ - "static/**/*", - ], - ), -) - -ts_library( - name = "peer-react-app", - srcs = glob(include = - [ - "**/*.tsx", - "**/*.ts", - ]), - tsconfig = "//:tsconfig.json", - deps = [ - "//comms/peer", - "//commons/utils", - "@npm//@types/react", - "@npm//@types/react-dom", - "@npm//decentraland-ui", - "@npm//react", - "@npm//react-dom", - ], -) - -ts_devserver( - name = "devserver", - additional_root_paths = [ - "npm/node_modules/react/umd", - "npm/node_modules/react-dom/umd", - "react_samples/src/styles", - ], - entry_module = "katalyst/comms/peer-react-app/index", - port = 3001, - # This is the path we'll request from the browser, see index.html - serving_path = "/bundle.js", - # The devserver can serve our static files too - static_files = [ - "index.html", - "static/index.css", - ], - deps = [ - "static/config.js", - ":peer-react-app", - "@npm//decentraland-ui", - "@npm//fp-future:fp-future__umd", - "@npm//protobufjs:protobufjs__umd", - "@npm//react", - "@npm//react-dom", - ], -) diff --git a/comms/peer-react-app/autotest/Test.json b/comms/peer-react-app/autotest/Test.json deleted file mode 100644 index 24a4f205e..000000000 --- a/comms/peer-react-app/autotest/Test.json +++ /dev/null @@ -1,86 +0,0 @@ -{ - "Name": "Test", - "CreationDate": "2019-12-13", - "Commands": [ - { - "Command": "while_v2", - "Target": "100 > 5", - "Value": "" - }, - { - "Command": "selectWindow", - "Target": "tab=open", - "Value": "http://localhost:3001" - }, - { - "Command": "open", - "Target": "http://localhost:3001/", - "Value": "" - }, - { - "Command": "click", - "Target": "xpath=//*[@id=\"root\"]/div/div/div/div/div/div[2]/div[2]/input", - "Value": "" - }, - { - "Command": "executeScript", - "Target": "return 'test_user' + Math.floor(Math.random()*999999999);", - "Value": "userId" - }, - { - "Command": "type", - "Target": "xpath=//*[@id=\"root\"]/div/div/div/div/div/div[2]/div[2]/input", - "Value": "${userId}" - }, - { - "Command": "type", - "Target": "xpath=//*[@id=\"root\"]/div/div/div/div/div/div[3]/div[2]/input", - "Value": "room" - }, - { - "Command": "click", - "Target": "xpath=//*[@id=\"root\"]/div/div/div/div/div/button", - "Value": "" - }, - { - "Command": "click", - "Target": "xpath=//*[@id=\"root\"]/div/div/div/div/div/div[1]/div/label", - "Value": "" - }, - { - "Command": "click", - "Target": "xpath=//*[@id=\"root\"]/div/div/div/div/div/div[3]/textarea", - "Value": "" - }, - { - "Command": "type", - "Target": "xpath=//*[@id=\"root\"]/div/div/div/div/div/div[3]/textarea", - "Value": "test" - }, - { - "Command": "click", - "Target": "xpath=//*[@id=\"root\"]/div/div/div/div/div/div[3]/button", - "Value": "" - }, - { - "Command": "click", - "Target": "xpath=//*[@id=\"root\"]/div/div/div/div/div/div[3]/textarea", - "Value": "" - }, - { - "Command": "type", - "Target": "xpath=//*[@id=\"root\"]/div/div/div/div/div/div[3]/textarea", - "Value": "hola" - }, - { - "Command": "click", - "Target": "xpath=//*[@id=\"root\"]/div/div/div/div/div/div[3]/button", - "Value": "" - }, - { - "Command": "endWhile", - "Target": "", - 
"Value": "" - } - ] -} \ No newline at end of file diff --git a/comms/peer-react-app/components/Chat.tsx b/comms/peer-react-app/components/Chat.tsx deleted file mode 100644 index 988d25223..000000000 --- a/comms/peer-react-app/components/Chat.tsx +++ /dev/null @@ -1,278 +0,0 @@ -/* eslint-disable @typescript-eslint/ban-ts-comment */ -import React, { useState, useRef, useEffect } from 'react' -import { IPeer } from '../../peer/src/types' -import { Button, Radio } from 'decentraland-ui' -import { PeerMessageTypes } from '../../peer/src/messageTypes' -import { mouse } from './Mouse' - -type Message = { - sender: string - content: string -} - -function MessageBubble(props: { message: Message; own?: boolean }) { - const { sender, content } = props.message - - const classes = ['message-bubble'] - if (props.own) { - classes.push('own') - } - - return ( -
-      <span>{sender}</span>
-      <span>{content}</span>
-    </div>
- ) -} - -function CursorComponent(props: { cursor: Cursor; peerId: string }) { - return ( -
-      {props.peerId}
-    </div>
- ) -} - -type Cursor = { - x: number - y: number - color: string -} - -// function randomColor() { -// return "hsl(" + Math.floor(Math.random() * 359) + ", 100%, 50%)"; -// } - -let intervalId: number | undefined = undefined - -export function Chat(props: { peer: IPeer; layer: string; room: string; url: string }) { - const [messages, setMessages] = useState>({}) - const [message, setMessage] = useState('') - const [cursors, setCursors] = useState>({}) - const [updatingCursors, setUpdatingCursors] = useState(!!new URLSearchParams(location.search).get('updatingCursors')) - const [currentRoom, setCurrentRoom] = useState(props.room) - const [availableRooms, setAvailableRooms] = useState([]) - const [joinedRooms, setJoinedRooms] = useState(props.peer.currentRooms) - const [newRoomName, setNewRoomName] = useState('') - const messagesEndRef: any = useRef() - - document.title = props.peer.peerIdOrFail() - - props.peer.callback = (sender, room, payload) => { - if (!joinedRooms.some((joined) => joined.id === room)) { - return - } - switch (payload.type) { - case 'chat': - appendMessage(room, sender, payload.message) - break - case 'cursorPosition': - setCursorPosition(sender, payload.position) - break - default: - console.log('Received unknown message type: ' + payload.type) - } - } - - function setCursorPosition(sender: string, position: { x: number; y: number }) { - if (updatingCursors) { - const cursorColor = props.peer.isConnectedTo(sender) ? 'green' : 'red' - - props.peer.setPeerPosition(sender, [position.x, position.y, 0]) - - setCursors({ - ...cursors, - [sender]: { color: cursorColor, x: position.x, y: position.y } - }) - } - } - - function sendCursorMessage() { - props.peer.sendMessage( - currentRoom, - { type: 'cursorPosition', position: { ...mouse } }, - PeerMessageTypes.unreliable('cursorPosition') - ) - } - - function sendMessage() { - appendMessage(currentRoom, props.peer.peerIdOrFail(), message) - props.peer.sendMessage(currentRoom, { type: 'chat', message }, PeerMessageTypes.reliable('chat')) - setMessage('') - } - - function appendMessage(room: string, sender: string, content: string) { - setMessages({ - ...messages, - [room]: [...(messages[room] ?? []), { sender, content }] - }) - } - - useEffect(() => { - messagesEndRef.current?.scrollIntoView({ behavior: 'smooth' }) - }, [messages]) - - useEffect(() => { - window.clearInterval(intervalId) - if (updatingCursors) { - intervalId = window.setInterval(() => sendCursorMessage(), 500) - } - - return () => window.clearInterval(intervalId) - }, [updatingCursors]) - - useEffect(() => { - setInterval(async () => { - try { - const response = await fetch(`${props.url}/layers/${props.layer}/rooms`) - const rooms = await response.json() - setAvailableRooms(rooms.filter((room) => !joinedRooms.some((joined) => joined.id === room))) - } catch (e) {} - }, 1000) - }, []) - - const users = [...(joinedRooms.find((r) => r.id === currentRoom)?.users?.values() ?? [])] - - async function joinRoom(room: string) { - try { - await props.peer.joinRoom(room) - setAvailableRooms(availableRooms.filter((r) => r !== room)) - setJoinedRooms(props.peer.currentRooms) - - // @ts-ignore - Object.keys(props.peer.knownPeers).forEach((it) => { - // @ts-ignore - const position = { x: props.peer.knownPeers[it].position[0], y: props.peer.knownPeers[it].position[1] } - setCursorPosition(it, position) - }) - } catch (e) { - console.log(`error while joining room ${room}`, e) - } - } - - return ( -
-    <div>
-      <h2>
-        Welcome to the Chat {props.peer.peerId}
-      </h2>
-      <div>
-        <div>
-          <h3>
-            Available rooms
-          </h3>
-          <ul>
-            {availableRooms.map((room, i) => (
-              <li key={i} onClick={() => joinRoom(room)}>
-                {room}
-              </li>
-            ))}
-          </ul>
-        </div>
-        <div>
-          <h3>
-            Rooms joined
-          </h3>
-          <ul>
-            {joinedRooms.map((room, i) => (
-              <li
-                key={i}
-                onClick={() => {
-                  const newRoom = room.id
-                  if (newRoom !== currentRoom) {
-                    setCurrentRoom(newRoom)
-                  }
-                }}
-              >
-                {room.id}
-              </li>
-            ))}
-          </ul>
-          <input
-            onChange={(event) => setNewRoomName(event.currentTarget.value)}
-            placeholder="roomName"
-          />
-        </div>
-        <div>
-          <h3>
-            Users in room
-          </h3>
-          <ul>
-            {users.map((user, i) => (
-              <li key={i}>
-                {user}
-              </li>
-            ))}
-          </ul>
-        </div>
-      </div>
-      <div>
-        <h3>
-          Now in {currentRoom}
-        </h3>
-        <Radio
-          toggle
-          checked={updatingCursors}
-          onChange={(_, data) => setUpdatingCursors(!!data.checked)}
-        />
-        <div>
-          {messages[currentRoom]?.map((it, i) => (
-            <MessageBubble key={i} message={it} own={it.sender === props.peer.peerIdOrFail()} />
-          ))}
-          <div ref={messagesEndRef} />