diff --git a/.github/workflows/integration-test-workflow-debian.yml b/.github/workflows/integration-test-workflow-debian.yml
index 697f7eb34..84b73b025 100644
--- a/.github/workflows/integration-test-workflow-debian.yml
+++ b/.github/workflows/integration-test-workflow-debian.yml
@@ -29,6 +29,17 @@ jobs:
           sudo systemctl start postgresql.service
           sudo -u postgres psql -c "ALTER USER postgres PASSWORD 'postgres';"
 
+      - name: Install pgvector
+        run: |
+          sudo apt-get install -y build-essential
+          sudo apt-get install -y postgresql-server-dev-all
+          git clone https://github.com/pgvector/pgvector.git
+          cd pgvector
+          make
+          sudo make install
+          cd ..
+          sudo -u postgres psql -c "CREATE EXTENSION vector;"
+
       - uses: actions/checkout@v4
       - uses: actions/setup-python@v4
        with:
diff --git a/py/tests/core/providers/embedding/conftest.py b/py/tests/core/providers/embedding/conftest.py
index 48cf7fb69..75e67a124 100644
--- a/py/tests/core/providers/embedding/conftest.py
+++ b/py/tests/core/providers/embedding/conftest.py
@@ -20,20 +20,22 @@ async def cleanup_tasks():
 
 
 @pytest.fixture
-def openai_provider():
+def openai_provider(app_config):
     config = EmbeddingConfig(
         provider="openai",
         base_model="text-embedding-ada-002",
         base_dimension=1536,
+        app=app_config
     )
     return OpenAIEmbeddingProvider(config)
 
 
 @pytest.fixture
-def ollama_provider():
+def ollama_provider(app_config):
     config = EmbeddingConfig(
         provider="ollama",
         base_model="mxbai-embed-large",
         base_dimension=1024,
+        app=app_config
     )
     return OllamaEmbeddingProvider(config)
diff --git a/py/tests/core/providers/embedding/test_litellm_embedding_provider.py b/py/tests/core/providers/embedding/test_litellm_embedding_provider.py
index b3c35a83e..e66eaf36c 100644
--- a/py/tests/core/providers/embedding/test_litellm_embedding_provider.py
+++ b/py/tests/core/providers/embedding/test_litellm_embedding_provider.py
@@ -21,11 +21,12 @@ async def cleanup_tasks():
 
 
 @pytest.fixture
-def litellm_provider():
+def litellm_provider(app_config):
     config = EmbeddingConfig(
         provider="litellm",
         base_model="openai/text-embedding-3-small",
         base_dimension=1536,
+        app=app_config
     )
     return LiteLLMEmbeddingProvider(config)
 
@@ -37,9 +38,9 @@ def test_litellm_initialization(litellm_provider):
     assert litellm_provider.base_dimension == 1536
 
 
-def test_litellm_invalid_provider_initialization():
+def test_litellm_invalid_provider_initialization(app_config):
     with pytest.raises(ValueError):
-        config = EmbeddingConfig(provider="invalid_provider")
+        config = EmbeddingConfig(provider="invalid_provider", app=app_config)
         LiteLLMEmbeddingProvider(config)
 
 
@@ -73,12 +74,13 @@ async def test_litellm_async_get_embeddings(litellm_provider):
     assert all(len(emb) == 1536 for emb in embeddings)
 
 
-def test_litellm_rerank_model_not_supported():
+def test_litellm_rerank_model_not_supported(app_config):
     config = EmbeddingConfig(
         provider="litellm",
         base_model="openai/text-embedding-3-small",
         base_dimension=1536,
         rerank_model="some-model",
+        app=app_config
     )
     with pytest.raises(
         ValueError, match="does not support separate reranking"
@@ -86,11 +88,12 @@ ):
         LiteLLMEmbeddingProvider(config)
 
 
-def test_litellm_unsupported_stage():
+def test_litellm_unsupported_stage(app_config):
     config = EmbeddingConfig(
         provider="litellm",
         base_model="openai/text-embedding-3-small",
         base_dimension=1536,
+        app=app_config
     )
     provider = LiteLLMEmbeddingProvider(config)
     with pytest.raises(
@@ -102,11 +105,12 @@
 
 
 @pytest.mark.asyncio
-async def test_litellm_async_unsupported_stage():
+async def test_litellm_async_unsupported_stage(app_config):
     config = EmbeddingConfig(
         provider="litellm",
         base_model="openai/text-embedding-3-small",
         base_dimension=1536,
+        app=app_config
     )
     provider = LiteLLMEmbeddingProvider(config)
     with pytest.raises(
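Note: every fixture and test updated above now requests an app_config fixture, but that fixture's definition lies outside the hunks shown in this patch. For reference, a minimal sketch of a compatible fixture; the import path and the bare AppConfig() construction are assumptions, not taken from this diff:

# Hypothetical sketch only -- the real app_config fixture is defined
# elsewhere in the test suite; the import path below is an assumption.
import pytest

from core.base import AppConfig


@pytest.fixture
def app_config():
    # A bare application-level config; the embedding tests only need an
    # object they can thread through as EmbeddingConfig(app=...).
    return AppConfig()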