Skip to content

Commit f8712d1

Browse files
author
Judd
committed
add brevity_penalty
1 parent 1efabc2 commit f8712d1

File tree

3 files changed

+30
-1162
lines changed

3 files changed

+30
-1162
lines changed

src/chatllm/bindings/chatllm.py

+4-4
Original file line numberDiff line numberDiff line change
@@ -21,9 +21,9 @@ class PrintType(IntEnum):
2121
PRINTLN_HISTORY_USER = 5, # print a whole line: user input history
2222
PRINTLN_HISTORY_AI = 6, # print a whole line: AI output history
2323
PRINTLN_TOOL_CALLING = 7, # print a whole line: tool calling (supported by only a few models)
24-
PRINTLN_EMBEDDING = 8, # print a whole line: embedding (example: "0.1, 0.3, ...")
24+
PRINTLN_EMBEDDING = 8, # print a whole line: embedding (example: "0.1,0.3,...")
2525
PRINTLN_RANKING = 9, # print a whole line: ranking (example: "0.8")
26-
PRINTLN_TOKEN_IDS =10, # print a whole line: token ids (example: "1, 3, 5, 8, ...")
26+
PRINTLN_TOKEN_IDS =10, # print a whole line: token ids (example: "1,3,5,8, ...")
2727
PRINTLN_LOGGING =11, # print a whole line: internal logging with the first char indicating level
2828
# (space): None; D: Debug; I: Info; W: Warn; E: Error; .: continue
2929
PRINTLN_BEAM_SEARCH =12, # print a whole line: a result of beam search with a prefix of probability
@@ -336,7 +336,7 @@ def tool_completion(self, user_input: str, completion_id = None) -> None:
336336

337337
def text_tokenize(self, txt: str) -> list[int]:
338338
self._result_text_tokenize = ''
339-
assert self._lib.text_tokenize(self._chat, txt) == 0, 'text_embedding failed'
339+
assert self._lib.text_tokenize(self._chat, txt) >= 0, 'text_tokenize failed'
340340
return json.loads(f"[{self._result_text_tokenize}]")
341341

342342
def text_embedding(self, txt: str) -> list[float]:
@@ -404,7 +404,7 @@ def callback_print_embedding(self, s: str) -> None:
404404
def callback_print_ranking(self, s: str) -> None:
405405
self._result_ranking = s
406406

407-
def callback_print_text_tokenize(self, s: str) -> None:
407+
def callback_text_tokenize(self, s: str) -> None:
408408
self._result_text_tokenize = s
409409

410410
def call_tool(self, s: str) -> None:

0 commit comments

Comments (0)