Skip to content

Commit 969ea29

Browse files
new: Added type stub (#458)
* new: Added type stub
* chore: Updated stubs
* chore: device_id type hint
* chore: add -> None to init without args
* new: Added workflow type check
* chore: Revert added type checkers
1 parent a5b266e commit 969ea29

File tree

1 file changed

+56
-0
lines changed

1 file changed

+56
-0
lines changed

tests/type_stub.py

+56
Original file line number · Diff line number · Diff line change
@@ -0,0 +1,56 @@
1+
from fastembed import TextEmbedding, LateInteractionTextEmbedding, SparseTextEmbedding
2+
from fastembed.sparse.bm25 import Bm25
3+
from fastembed.rerank.cross_encoder import TextCrossEncoder
4+
5+
6+
text_embedder = TextEmbedding(cache_dir="models")
7+
late_interaction_embedder = LateInteractionTextEmbedding(model_name="", cache_dir="models")
8+
reranker = TextCrossEncoder(model_name="", cache_dir="models")
9+
sparse_embedder = SparseTextEmbedding(model_name="", cache_dir="models")
10+
bm25_embedder = Bm25(
11+
model_name="",
12+
k=1.0,
13+
b=1.0,
14+
avg_len=1.0,
15+
language="",
16+
token_max_length=1,
17+
disable_stemmer=False,
18+
specific_model_path="models",
19+
)
20+
21+
text_embedder.list_supported_models()
22+
text_embedder.embed(documents=[""], batch_size=1, parallel=1)
23+
text_embedder.embed(documents="", parallel=None, task_id=1)
24+
text_embedder.query_embed(query=[""], batch_size=1, parallel=1)
25+
text_embedder.query_embed(query="", parallel=None)
26+
text_embedder.passage_embed(texts=[""], batch_size=1, parallel=1)
27+
text_embedder.passage_embed(texts=[""], parallel=None)
28+
29+
late_interaction_embedder.list_supported_models()
30+
late_interaction_embedder.embed(documents=[""], batch_size=1, parallel=1)
31+
late_interaction_embedder.embed(documents="", parallel=None)
32+
late_interaction_embedder.query_embed(query=[""], batch_size=1, parallel=1)
33+
late_interaction_embedder.query_embed(query="", parallel=None)
34+
late_interaction_embedder.passage_embed(texts=[""], batch_size=1, parallel=1)
35+
late_interaction_embedder.passage_embed(texts=[""], parallel=None)
36+
37+
reranker.list_supported_models()
38+
reranker.rerank(query="", documents=[""], batch_size=1, parallel=1)
39+
reranker.rerank(query="", documents=[""], parallel=None)
40+
reranker.rerank_pairs(pairs=[("", "")], batch_size=1, parallel=1)
41+
reranker.rerank_pairs(pairs=[("", "")], parallel=None)
42+
43+
sparse_embedder.list_supported_models()
44+
sparse_embedder.embed(documents=[""], batch_size=1, parallel=1)
45+
sparse_embedder.embed(documents="", batch_size=1, parallel=None)
46+
sparse_embedder.query_embed(query=[""], batch_size=1, parallel=1)
47+
sparse_embedder.query_embed(query="", batch_size=1, parallel=None)
48+
sparse_embedder.passage_embed(texts=[""], batch_size=1, parallel=1)
49+
sparse_embedder.passage_embed(texts=[""], batch_size=1, parallel=None)
50+
51+
bm25_embedder.list_supported_models()
52+
bm25_embedder.embed(documents=[""], batch_size=1, parallel=1)
53+
bm25_embedder.embed(documents="", batch_size=1, parallel=None)
54+
bm25_embedder.query_embed(query=[""], batch_size=1, parallel=1)
55+
bm25_embedder.query_embed(query="", batch_size=1, parallel=None)
56+
bm25_embedder.raw_embed(documents=[""])

0 commit comments

Comments
 (0)