From 74918f96415a0dae5d9aa3411046afd0bb51d8c5 Mon Sep 17 00:00:00 2001 From: Rathijit Paul <30369246+rathijitpapon@users.noreply.github.com> Date: Fri, 28 Jun 2024 14:53:42 +0600 Subject: [PATCH] :bug: fix missing packages and :fire: removed secrets from deployment files --- agency/app/tracing/utils.py | 27 ++++++----- agency/poetry.lock | 62 ++++++++++++++++++++++++- agency/pyproject.toml | 4 ++ frontend/Dockerfile | 5 ++ frontend/Makefile | 4 +- frontend/helm/templates/deployment.yaml | 18 ++----- 6 files changed, 90 insertions(+), 30 deletions(-) diff --git a/agency/app/tracing/utils.py b/agency/app/tracing/utils.py index 88bd23c3..21dd2eea 100644 --- a/agency/app/tracing/utils.py +++ b/agency/app/tracing/utils.py @@ -4,7 +4,6 @@ from openinference.instrumentation.llama_index import LlamaIndexInstrumentor from openinference.semconv.resource import ResourceAttributes from opentelemetry import trace -from opentelemetry import trace as trace_api from opentelemetry.exporter.otlp.proto.grpc.trace_exporter import OTLPSpanExporter from opentelemetry.instrumentation.grpc import GrpcAioInstrumentorServer from opentelemetry.instrumentation.llamaindex import ( @@ -25,24 +24,12 @@ def setup_tracing(settings: TracingSettings) -> None: if settings.environment == "production": - # jaeger opentelemetry tracing resource = Resource( attributes={ ResourceAttributes.PROJECT_NAME: settings.project_name, "service.name": settings.service_name, }, ) - trace.set_tracer_provider(TracerProvider(resource=resource)) - trace.get_tracer_provider().add_span_processor( - BatchSpanProcessor( - OTLPSpanExporter( - endpoint=settings.jaeger_endpoint, - ) - ) - ) - set_global_textmap(TraceContextTextMapPropagator()) - GrpcAioInstrumentorServer().instrument() - OpentelemetryLlamaIndexInstrumentor().instrument() # sentry tracing sentry_sdk.init( @@ -60,5 +47,17 @@ def setup_tracing(settings: TracingSettings) -> None: span_exporter = OTLPSpanExporter(endpoint=settings.phoenix_api) span_processor = 
SimpleSpanProcessor(span_exporter=span_exporter) tracer_provider.add_span_processor(span_processor=span_processor) - trace_api.set_tracer_provider(tracer_provider=tracer_provider) + trace.set_tracer_provider(tracer_provider=tracer_provider) LlamaIndexInstrumentor().instrument() + + # jaeger opentelemetry tracing + trace.get_tracer_provider().add_span_processor( + BatchSpanProcessor( + OTLPSpanExporter( + endpoint=settings.jaeger_endpoint, + ) + ) + ) + set_global_textmap(TraceContextTextMapPropagator()) + GrpcAioInstrumentorServer().instrument() + OpentelemetryLlamaIndexInstrumentor().instrument() diff --git a/agency/poetry.lock b/agency/poetry.lock index 85dbdf6b..28690e37 100644 --- a/agency/poetry.lock +++ b/agency/poetry.lock @@ -1569,6 +1569,17 @@ docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.link perf = ["ipython"] testing = ["flufl.flake8", "importlib-resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-perf (>=0.9.2)", "pytest-ruff (>=0.2.1)"] +[[package]] +name = "inflection" +version = "0.5.1" +description = "A port of Ruby on Rails inflector to Python" +optional = false +python-versions = ">=3.5" +files = [ + {file = "inflection-0.5.1-py2.py3-none-any.whl", hash = "sha256:f38b2b640938a4f35ade69ac3d053042959b62a0f1076a5bbaa1b9526605a8a2"}, + {file = "inflection-0.5.1.tar.gz", hash = "sha256:1a29730d366e996aaacffb2f1f1cb9593dc38e2ddd30c91250c6dde09ea9b417"}, +] + [[package]] name = "iniconfig" version = "2.0.0" @@ -3165,6 +3176,44 @@ opentelemetry-api = ">=1.4,<2.0" setuptools = ">=16.0" wrapt = ">=1.0.0,<2.0.0" +[[package]] +name = "opentelemetry-instrumentation-grpc" +version = "0.46b0" +description = "OpenTelemetry gRPC instrumentation" +optional = false +python-versions = ">=3.8" +files = [ + {file = "opentelemetry_instrumentation_grpc-0.46b0-py3-none-any.whl", hash = 
"sha256:cccfb28db07c28849709f2dcf330237fae0fca9f86971bfce27b28bb9a8b0577"}, + {file = "opentelemetry_instrumentation_grpc-0.46b0.tar.gz", hash = "sha256:9c5738592cf82672805099826b676d352324b54e03f9ac72a1368ba0605d6ff9"}, +] + +[package.dependencies] +opentelemetry-api = ">=1.12,<2.0" +opentelemetry-instrumentation = "0.46b0" +opentelemetry-semantic-conventions = "0.46b0" +wrapt = ">=1.0.0,<2.0.0" + +[package.extras] +instruments = ["grpcio (>=1.27,<2.0)"] + +[[package]] +name = "opentelemetry-instrumentation-llamaindex" +version = "0.23.0" +description = "OpenTelemetry LlamaIndex instrumentation" +optional = false +python-versions = "<4,>=3.9" +files = [ + {file = "opentelemetry_instrumentation_llamaindex-0.23.0-py3-none-any.whl", hash = "sha256:792f7e6fe1b9051fae128dce2da30e8bc4dc4ff0593978ff86d145df64148290"}, + {file = "opentelemetry_instrumentation_llamaindex-0.23.0.tar.gz", hash = "sha256:8fe25346f431689822560a1dbf844732d28fd91e7dfd73754afdc1d97d334a7b"}, +] + +[package.dependencies] +inflection = ">=0.5.1,<0.6.0" +opentelemetry-api = ">=1.25.0,<2.0.0" +opentelemetry-instrumentation = ">=0.46b0,<0.47" +opentelemetry-semantic-conventions = ">=0.46b0,<0.47" +opentelemetry-semantic-conventions-ai = "0.3.1" + [[package]] name = "opentelemetry-proto" version = "1.25.0" @@ -3209,6 +3258,17 @@ files = [ [package.dependencies] opentelemetry-api = "1.25.0" +[[package]] +name = "opentelemetry-semantic-conventions-ai" +version = "0.3.1" +description = "OpenTelemetry Semantic Conventions Extension for Large Language Models" +optional = false +python-versions = "<4,>=3.9" +files = [ + {file = "opentelemetry_semantic_conventions_ai-0.3.1-py3-none-any.whl", hash = "sha256:14af6daefa46e15d12e5ca192a4f9ca4400698bc267326da53f3cde6d4978031"}, + {file = "opentelemetry_semantic_conventions_ai-0.3.1.tar.gz", hash = "sha256:02c6418719b423f4add17b7d65408b21977e4d3abcd736fffb961245403e7dc6"}, +] + [[package]] name = "optuna" version = "3.6.1" @@ -5934,4 +5994,4 @@ test = ["big-O", 
"importlib-resources", "jaraco.functools", "jaraco.itertools", [metadata] lock-version = "2.0" python-versions = "~3.12" -content-hash = "5496407bf8ec36e08b1269c6d134149a9ab2803c6f9d3641dca988001764a2e8" +content-hash = "4e8b2fec95d601f2100a96dab147cee30722857d66596a0028a4e7fe60f5177b" diff --git a/agency/pyproject.toml b/agency/pyproject.toml index 3343f54c..2f122cf9 100644 --- a/agency/pyproject.toml +++ b/agency/pyproject.toml @@ -40,6 +40,10 @@ qdrant-client = "^1.9.1" llmlingua = "^0.2.2" llama-index-llms-huggingface = "^0.2.2" +opentelemetry-instrumentation-llamaindex = "^0.23.0" +opentelemetry-sdk = "^1.25.0" +opentelemetry-instrumentation = "^0.46b0" +opentelemetry-instrumentation-grpc = "^0.46b0" [tool.poetry.group.dev.dependencies] pytest = "^8.1.1" mypy = "^1.9.0" diff --git a/frontend/Dockerfile b/frontend/Dockerfile index 731dde66..26cdeaa7 100644 --- a/frontend/Dockerfile +++ b/frontend/Dockerfile @@ -13,6 +13,7 @@ COPY next.config.mjs . COPY postcss.config.mjs . COPY tailwind.config.ts . COPY tsconfig.json . +COPY sentry.* . 
ARG POSTHOG_KEY ARG POSTHOG_API_HOST @@ -20,6 +21,8 @@ ARG POSTHOG_UI_HOST ARG API_URL ARG AUTH_URL ARG AUTH_SECRET +ARG SENTRY_DSN +ARG SENTRY_AUTH_TOKEN ENV NEXT_TELEMETRY_DISABLED 1 ENV NEXT_PUBLIC_POSTHOG_KEY $POSTHOG_KEY @@ -28,6 +31,8 @@ ENV NEXT_PUBLIC_POSTHOG_UI_HOST $POSTHOG_UI_HOST ENV NEXT_PUBLIC_API_URL $API_URL ENV AUTH_URL $AUTH_URL ENV AUTH_SECRET $AUTH_SECRET +ENV NEXT_PUBLIC_SENTRY_DSN $SENTRY_DSN +ENV SENTRY_AUTH_TOKEN $SENTRY_AUTH_TOKEN RUN npm run build diff --git a/frontend/Makefile b/frontend/Makefile index 8e250b3a..f2d1abd3 100644 --- a/frontend/Makefile +++ b/frontend/Makefile @@ -8,9 +8,11 @@ POSTHOG_UI_HOST = https://app.posthog.com API_URL = AUTH_URL = AUTH_SECRET = +SENTRY_DSN = +SENTRY_AUTH_TOKEN = ecr_deploy: login - docker buildx build --platform linux/amd64 --build-arg POSTHOG_KEY=$(POSTHOG_KEY) --build-arg POSTHOG_API_HOST=$(POSTHOG_API_HOST) --build-arg POSTHOG_UI_HOST=$(POSTHOG_UI_HOST) --build-arg API_URL=$(API_URL) --build-arg AUTH_URL=${AUTH_URL} --build-arg AUTH_SECRET=${AUTH_SECRET} -t $(Image) . + docker buildx build --platform linux/amd64 --build-arg POSTHOG_KEY=$(POSTHOG_KEY) --build-arg POSTHOG_API_HOST=$(POSTHOG_API_HOST) --build-arg POSTHOG_UI_HOST=$(POSTHOG_UI_HOST) --build-arg API_URL=$(API_URL) --build-arg AUTH_URL=$(AUTH_URL) --build-arg AUTH_SECRET=$(AUTH_SECRET) --build-arg SENTRY_DSN=$(SENTRY_DSN) --build-arg SENTRY_AUTH_TOKEN=$(SENTRY_AUTH_TOKEN) -t $(Image) . 
docker tag $(Image) $(ECR):$(TAG) docker push $(ECR):$(TAG) diff --git a/frontend/helm/templates/deployment.yaml b/frontend/helm/templates/deployment.yaml index 6edddb93..8c64fc9f 100644 --- a/frontend/helm/templates/deployment.yaml +++ b/frontend/helm/templates/deployment.yaml @@ -16,21 +16,11 @@ spec: app: search-{{ .Release.Name }} spec: containers: - - image: {{ .Values.service.image }} + - envFrom: + - configMapRef: + name: {{ .Release.Name }}-configmap + image: {{ .Values.service.image }} imagePullPolicy: Always - env: - - name: NEXT_PUBLIC_POSTHOG_UI_HOST - value: https://app.posthog.com - - name: NEXT_PUBLIC_POSTHOG_KEY - value: phc_Qau6pGkA8BXUr7JuDxgALufZqhxMKAk9UVGY4UFQyXu - - name: NEXT_PUBLIC_POSTHOG_API_HOST - value: https://search.curieo.ai/ingest - - name: NEXT_PUBLIC_API_URL - value: http://search-server.search.svc.cluster.local:3030 - - name: AUTH_URL - value: https://search.curieo.ai - - name: AUTH_SECRET - value: 05mk7ukF/GSG1dHLYVp04Mxa49slQb2W3N9v1aMcRTM= name: search-{{ .Release.Name }} ports: - containerPort: {{ .Values.service.port }}