diff --git a/api-reference/llm_queries/chat_completions.mdx b/api-reference/llm_queries/chat_completions.mdx index b184499fc..8e105d9a7 100644 --- a/api-reference/llm_queries/chat_completions.mdx +++ b/api-reference/llm_queries/chat_completions.mdx @@ -182,8 +182,8 @@ curl --request POST \ "response_format": "{ \"type\": \"json_mode\"}", "seed": 11, "stream_options": [ - "include_usage", - true + true, + "include_usage" ], "top_p": 0.5, "tool_choice": "{\"type\": \"function\", \"function\": {\"name\": \"my_function\"}}", @@ -203,7 +203,7 @@ url = "https://api.unify.ai/v0/chat/completions" headers = {"Authorization": "Bearer "} -json_input = {"messages": [{"content": "Tell me a joke", "role": "user"}], "model": "gpt-4o-mini@openai", "max_tokens": 1024, "stop": ["The End.", " is the answer."], "stream": False, "temperature": 0.9, "frequency_penalty": 1.5, "logit_bias": {"0": 10, "1": -75, "2": 90}, "logprobs": False, "top_logprobs": 15, "n": 15, "presence_penalty": -1.1, "response_format": "{ "type": "json_mode"}", "seed": 11, "stream_options": ["include_usage", True], "top_p": 0.5, "tool_choice": "{"type": "function", "function": {"name": "my_function"}}", "parallel_tool_calls": True, "user": "some_user", "signature": "python", "use_custom_keys": True, "tags": True, "drop_params": True} +json_input = {"messages": [{"content": "Tell me a joke", "role": "user"}], "model": "gpt-4o-mini@openai", "max_tokens": 1024, "stop": ["The End.", " is the answer."], "stream": False, "temperature": 0.9, "frequency_penalty": 1.5, "logit_bias": {"0": 10, "1": -75, "2": 90}, "logprobs": False, "top_logprobs": 15, "n": 15, "presence_penalty": -1.1, "response_format": "{ \"type\": \"json_mode\"}", "seed": 11, "stream_options": [True, "include_usage"], "top_p": 0.5, "tool_choice": "{\"type\": \"function\", \"function\": {\"name\": \"my_function\"}}", "parallel_tool_calls": True, "user": "some_user", "signature": "python", "use_custom_keys": True, "tags": True, "drop_params": True} response = 
requests.request("POST", url, json=json_input, headers=headers) diff --git a/api-reference/openapi.json b/api-reference/openapi.json index db836ec72..253ada919 100644 --- a/api-reference/openapi.json +++ b/api-reference/openapi.json @@ -3506,8 +3506,8 @@ "title": "Stream Options", "description": "Options for streaming response. Only set this when you set `stream: true`.", "example": [ - "include_usage", - true + true, + "include_usage" ] }, "top_p": { diff --git a/python/dataset.mdx b/python/dataset.mdx index 32b0ac6c0..b7448eeb9 100644 --- a/python/dataset.mdx +++ b/python/dataset.mdx @@ -20,7 +20,7 @@ class Dataset() def __init__(data: Union[str, List[str, Query, DatasetEntry]], *, name: str = None, - auto_sync: bool = False, + auto_sync: Union[bool, str] = False, api_key: Optional[str] = None) ``` @@ -35,7 +35,11 @@ Initialize a local dataset of LLM queries. - `name` - The name of the dataset. - `auto_sync` - Whether to automatically keep this dataset fully synchronized - with the upstream variant at all times. + with the upstream variant at all times. If `True` or "both" then the sync + will be bi-directional; if "upload_only" then all local changes will be + uploaded to the upstream account without any downloads; if "download_only" + then all upstream changes will be downloaded locally without any uploads. + If `False` or "neither" then no synchronization will be done automatically. - `api_key` - API key for accessing the Unify API. If None, it attempts to retrieve the API key from the environment variable UNIFY_KEY. Defaults to