-
Notifications
You must be signed in to change notification settings - Fork 22
Expand file tree
/
Copy pathllm_utils.py
More file actions
64 lines (50 loc) · 2.15 KB
/
llm_utils.py
File metadata and controls
64 lines (50 loc) · 2.15 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Utility helpers for LLM calls (temperature compatibility, etc.).
"""
from __future__ import annotations
from typing import Optional, Tuple
TEMPERATURE_UNSUPPORTED_MODELS = {"gpt-5-mini"}
def _normalize_model_name(model: Optional[str]) -> str:
return (model or "").strip().lower()
def supports_temperature(model: Optional[str]) -> bool:
    """Return True if the model accepts the temperature parameter."""
    normalized = _normalize_model_name(model)
    # Membership in the deny-set means the API rejects/ignores temperature.
    return normalized not in TEMPERATURE_UNSUPPORTED_MODELS
def _temperature_guideline(desired_temperature: float) -> str:
"""Provide textual guidance to emulate a temperature when it cannot be set."""
if desired_temperature <= 0.25:
return (
"Respond deterministically with minimal stylistic variance. Keep sentences concise, "
"avoid speculative language, and stay strictly grounded in provided facts."
)
if desired_temperature >= 0.85:
return (
"Adopt a creative tone, explore multiple alternatives, and surface non-obvious insights. "
"Offer at least two contrasting options whenever relevant."
)
return (
"Balance precision and creativity. Provide structured answers while adding one or two "
"supplementary ideas to show moderate exploration without losing clarity."
)
def apply_temperature_strategy(
    model: Optional[str],
    system_prompt: str,
    desired_temperature: float,
) -> Tuple[str, Optional[float]]:
    """
    Adjust prompts/parameters to match temperature intent.
    Returns the (possibly augmented) system prompt and an optional temperature value.
    """
    # Fast path: the model honors temperature, so pass everything through untouched.
    if supports_temperature(model):
        return system_prompt, desired_temperature
    # Otherwise, emulate the requested temperature via prompt instructions and
    # return None so the caller omits the temperature parameter entirely.
    directive = (
        "## Variability Directive\n"
        f"The current model ({model}) ignores the temperature parameter. "
        f"Simulate temperature={desired_temperature:.2f} using this guidance:\n"
        f"{_temperature_guideline(desired_temperature)}"
    )
    if system_prompt.strip():
        combined_prompt = f"{system_prompt.rstrip()}\n\n{directive}"
    else:
        # An empty/whitespace-only prompt is replaced by the directive alone.
        combined_prompt = directive
    return combined_prompt, None