
Commit

add pgvector
emrgnt-cmplxty committed Oct 4, 2024
1 parent 2924dfa commit b11ce36
Showing 3 changed files with 25 additions and 8 deletions.
11 changes: 11 additions & 0 deletions .github/workflows/integration-test-workflow-debian.yml
@@ -29,6 +29,17 @@ jobs:
           sudo systemctl start postgresql.service
           sudo -u postgres psql -c "ALTER USER postgres PASSWORD 'postgres';"
+      - name: Install pgvector
+        run: |
+          sudo apt-get install -y build-essential
+          sudo apt-get install -y postgresql-server-dev-all
+          git clone https://github.com/pgvector/pgvector.git
+          cd pgvector
+          make
+          sudo make install
+          cd ..
+          sudo -u postgres psql -c "CREATE EXTENSION vector;"
       - uses: actions/checkout@v4
       - uses: actions/setup-python@v4
         with:
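As a side note, a minimal hypothetical smoke test (not part of this commit) could confirm that the vector extension created by the step above is visible to the CI database; it assumes psycopg2 is available and reuses the postgres/postgres credentials configured earlier in the workflow.

# Hypothetical check, not from this commit: confirm the pgvector extension
# installed by the workflow step above is registered in the CI Postgres.
import psycopg2  # assumed to be installed in the test environment


def test_pgvector_extension_available():
    conn = psycopg2.connect(
        host="localhost",
        user="postgres",
        password="postgres",  # password set by the ALTER USER step above
        dbname="postgres",
    )
    try:
        with conn.cursor() as cur:
            cur.execute(
                "SELECT extname FROM pg_extension WHERE extname = 'vector';"
            )
            assert cur.fetchone() is not None
    finally:
        conn.close()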
6 changes: 4 additions & 2 deletions py/tests/core/providers/embedding/conftest.py
@@ -20,20 +20,22 @@ async def cleanup_tasks():


 @pytest.fixture
-def openai_provider():
+def openai_provider(app_config):
     config = EmbeddingConfig(
         provider="openai",
         base_model="text-embedding-ada-002",
         base_dimension=1536,
+        app=app_config
     )
     return OpenAIEmbeddingProvider(config)


 @pytest.fixture
-def ollama_provider():
+def ollama_provider(app_config):
     config = EmbeddingConfig(
         provider="ollama",
         base_model="mxbai-embed-large",
         base_dimension=1024,
+        app=app_config
     )
     return OllamaEmbeddingProvider(config)
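The updated fixtures above now declare app_config as a parameter, so pytest resolves that fixture first and injects its value into each provider fixture. The app_config fixture itself is defined elsewhere in the test suite and is not shown in this diff; the sketch below is a hypothetical stand-in that only illustrates the dependency-injection pattern.

# Hypothetical stand-in for app_config (the real fixture lives elsewhere in
# the repository and is not part of this diff).
import pytest


@pytest.fixture
def app_config():
    # Assumed shape: whatever EmbeddingConfig expects for its `app` field.
    return {"project_name": "test_project"}


@pytest.fixture
def example_provider(app_config):
    # Pytest resolves app_config above and passes its value in here, mirroring
    # openai_provider(app_config) and ollama_provider(app_config).
    return {"app": app_config}

The third changed file, shown next, applies the same app_config change to the LiteLLM embedding provider tests.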
@@ -21,11 +21,12 @@ async def cleanup_tasks():


 @pytest.fixture
-def litellm_provider():
+def litellm_provider(app_config):
     config = EmbeddingConfig(
         provider="litellm",
         base_model="openai/text-embedding-3-small",
         base_dimension=1536,
+        app=app_config
     )

     return LiteLLMEmbeddingProvider(config)
@@ -37,9 +38,9 @@ def test_litellm_initialization(litellm_provider):
     assert litellm_provider.base_dimension == 1536


-def test_litellm_invalid_provider_initialization():
+def test_litellm_invalid_provider_initialization(app_config):
     with pytest.raises(ValueError):
-        config = EmbeddingConfig(provider="invalid_provider")
+        config = EmbeddingConfig(provider="invalid_provider", app=app_config)
         LiteLLMEmbeddingProvider(config)


@@ -73,24 +74,26 @@ async def test_litellm_async_get_embeddings(litellm_provider):
     assert all(len(emb) == 1536 for emb in embeddings)


-def test_litellm_rerank_model_not_supported():
+def test_litellm_rerank_model_not_supported(app_config):
     config = EmbeddingConfig(
         provider="litellm",
         base_model="openai/text-embedding-3-small",
         base_dimension=1536,
         rerank_model="some-model",
+        app=app_config
     )
     with pytest.raises(
         ValueError, match="does not support separate reranking"
     ):
         LiteLLMEmbeddingProvider(config)


-def test_litellm_unsupported_stage():
+def test_litellm_unsupported_stage(app_config):
     config = EmbeddingConfig(
         provider="litellm",
         base_model="openai/text-embedding-3-small",
         base_dimension=1536,
+        app=app_config
     )
     provider = LiteLLMEmbeddingProvider(config)
     with pytest.raises(
@@ -102,11 +105,12 @@ def test_litellm_unsupported_stage():


 @pytest.mark.asyncio
-async def test_litellm_async_unsupported_stage():
+async def test_litellm_async_unsupported_stage(app_config):
     config = EmbeddingConfig(
         provider="litellm",
         base_model="openai/text-embedding-3-small",
         base_dimension=1536,
+        app=app_config
     )
     provider = LiteLLMEmbeddingProvider(config)
     with pytest.raises(
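The body of test_litellm_async_get_embeddings is collapsed in this view; purely as an illustration, an async embeddings test of that shape typically resembles the sketch below (the async_get_embeddings method name and its signature are assumptions inferred from the test name, not taken from this diff).

# Rough illustration only; the provider method name and signature are assumed.
import pytest


@pytest.mark.asyncio
async def test_litellm_async_get_embeddings(litellm_provider):
    texts = ["hello", "world"]
    embeddings = await litellm_provider.async_get_embeddings(texts)  # assumed API
    assert len(embeddings) == len(texts)
    assert all(len(emb) == 1536 for emb in embeddings)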
