@@ -97,31 +97,19 @@ def _validate_variables(
 
     @classmethod
     def _find_live_version(cls, versions: Dict[str, Any]) -> Optional[str]:
-        """Find the 'latest' live version based on 'last_modified' or version naming."""
-        # Filter only versions where is_live == True
-        live_versions = {k: v for k, v in versions.items() if v.get("is_live", False)}
+        """Find the live version. Only one version should be live at a time."""
+        # Find versions where is_live == True
+        live_versions = [k for k, v in versions.items() if v.get("is_live", False)]
+
         if not live_versions:
             return None
+
+        if len(live_versions) > 1:
+            raise ValueError(
+                f"Multiple live versions found: {live_versions}. Only one version can be live at a time."
+            )
 
-        # Strategy: pick the version with the largest "last_modified" timestamp
-        # (Alternate: pick the lexicographically largest version name, etc.)
-        # We'll parse the "last_modified" as an ISO string if possible.
-        def parse_iso(dt_str: str) -> float:
-            # Convert "YYYY-MM-DDTHH:MM:SS" into a float (timestamp) for easy comparison
-            import datetime
-            try:
-                return datetime.datetime.fromisoformat(dt_str).timestamp()
-            except Exception:
-                # fallback if parse fails
-                return 0.0
-
-        live_versions_list = list(live_versions.items())
-        live_versions_list.sort(
-            key=lambda x: parse_iso(x[1].get("last_modified", "1970-01-01T00:00:00")),
-            reverse=True
-        )
-        # Return the key of the version with the newest last_modified
-        return live_versions_list[0][0]  # (version_key, version_data)
+        return live_versions[0]
 
     @classmethod
     def get_prompt(cls, prompt_template: str, version: Optional[str] = None, **variables) -> str:
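
With this change, `_find_live_version` no longer picks the most recently modified live version; it expects at most one entry whose `is_live` flag is set and raises a `ValueError` when several are flagged. A minimal sketch of the `versions` mapping it consumes, assuming the key names visible in this diff (`is_live`, `model`, `temperature`) and illustrative version names and values:

```python
# Hypothetical excerpt of a prompts.json entry; only "is_live" is inspected
# by _find_live_version, the other keys are read later when building configs.
versions = {
    "v1": {"is_live": False, "model": "gpt-4o-mini", "temperature": 0.2},
    "v2": {"is_live": True, "model": "gpt-4o-mini", "temperature": 0.2},
}

# Mirrors the new lookup: collect the keys flagged as live.
live = [k for k, v in versions.items() if v.get("is_live", False)]
assert live == ["v2"]  # flagging both versions would now raise ValueError
```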
@@ -191,3 +179,106 @@ def get_prompt(cls, prompt_template: str, version: Optional[str] = None, **variables
         result = result.replace("\\n", "\n")
 
         return result
+
+
+    @classmethod
+    def prepare_model_config(cls, prompt_template: str, memory: List[Dict[str, str]], version: Optional[str] = None, **variables) -> Dict[str, Any]:
+        """Prepare a model configuration ready for the OpenAI chat completions API.
+
+        Args:
+            prompt_template (str): The name of the prompt template to use
+            memory (List[Dict[str, str]]): List of previous messages in the conversation
+            version (Optional[str]): Specific version to use (e.g. "v1").
+                If None, uses the live version.
+            **variables: Variable key-value pairs to fill in the prompt template
+
+        Returns:
+            Dict[str, Any]: Configuration dictionary for the OpenAI chat completions API
+
+        Raises:
+            ValueError: If the prompt template is not found, required variables are missing, or the system message is empty
+            TypeError: If a variable doesn't match the schema type or the memory format is invalid
+        """
+        # Validate memory format
+        if not isinstance(memory, list):
+            raise TypeError("Memory must be a list of message dictionaries")
+
+        for msg in memory:
+            if not isinstance(msg, dict):
+                raise TypeError("Each memory item must be a dictionary")
+            if "role" not in msg or "content" not in msg:
+                raise ValueError("Each memory item must have 'role' and 'content' keys")
+            if msg["role"] not in ["user", "assistant", "system"]:
+                raise ValueError("Message role must be 'user', 'assistant', or 'system'")
+            if not isinstance(msg["content"], str):
+                raise TypeError("Message content must be a string")
+            if not msg["content"].strip():
+                raise ValueError("Message content cannot be empty")
+
+        # Get the system message using the existing get_prompt method
+        system_message = cls.get_prompt(prompt_template, version, **variables)
+
+        if not system_message.strip():
+            raise ValueError("System message cannot be empty")
+
+        # Get the prompt configuration
+        if not cls._prompts:
+            cls._load_prompts()
+
+        if prompt_template not in cls._prompts:
+            raise ValueError(f"Prompt template '{prompt_template}' not found in prompts.json.")
+
+        prompt_data = cls._prompts[prompt_template]
+        versions = prompt_data.get("versions", {})
+
+        # Determine which version to use
+        version_data = None
+        if version:
+            if version not in versions:
+                raise ValueError(f"Version '{version}' not found for prompt '{prompt_template}'.")
+            version_data = versions[version]
+        else:
+            live_version_key = cls._find_live_version(versions)
+            if not live_version_key:
+                raise ValueError(f"No live version found for prompt '{prompt_template}'.")
+            version_data = versions[live_version_key]
+
+        # Initialize the base configuration with required parameters
+        model_config = {
+            "messages": [{"role": "system", "content": system_message}]
+        }
+        model_config["messages"].extend(memory)
+
+        # Model is required for the OpenAI API
+        if "model" not in version_data:
+            raise ValueError(f"Model must be specified in the version data for prompt '{prompt_template}'")
+        model_config["model"] = version_data["model"]
+
+        # Add optional configuration parameters only if they are present and not null
+        optional_params = [
+            ("temperature", (int, float)),
+            ("max_tokens", int),
+            ("top_p", (int, float)),
+            ("frequency_penalty", (int, float)),
+            ("presence_penalty", (int, float))
+        ]
+
+        for param_name, expected_type in optional_params:
+            if param_name in version_data and version_data[param_name] is not None:
+                value = version_data[param_name]
+                if not isinstance(value, expected_type):
+                    raise ValueError(f"{param_name} must be of type {expected_type}")
+                model_config[param_name] = value
+
+        # Add tools configuration if present and non-empty
+        if "tools" in version_data and version_data["tools"]:
+            tools = version_data["tools"]
+            if not isinstance(tools, list):
+                raise ValueError("Tools configuration must be a list")
+            model_config["tools"] = tools
+
+            # If tools are present, also set tool_choice if specified
+            if "tool_choice" in version_data:
+                model_config["tool_choice"] = version_data["tool_choice"]
+
+        return model_config
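
Since the returned dictionary mirrors the keyword arguments of the chat completions endpoint, it can be unpacked directly into the OpenAI client. A rough usage sketch, assuming the enclosing class is importable as `PromptManager` from a `prompts` module and that a `"support_agent"` template with a `{customer_name}` variable exists in prompts.json (these names are illustrative, not taken from this diff):

```python
from openai import OpenAI

from prompts import PromptManager  # hypothetical module / class name

client = OpenAI()

memory = [
    {"role": "user", "content": "Hi, my order arrived damaged."},
    {"role": "assistant", "content": "Sorry to hear that! Could you share the order number?"},
    {"role": "user", "content": "It's 48151."},
]

# Builds {"model": ..., "messages": [system message + memory], ...} from the
# live (or explicitly requested) version of the template.
model_config = PromptManager.prepare_model_config(
    "support_agent",       # illustrative template name
    memory,
    customer_name="Ada",   # illustrative template variable
)

# The dict matches the chat completions keyword arguments, so it can be
# passed through unchanged.
response = client.chat.completions.create(**model_config)
print(response.choices[0].message.content)
```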