Commit b2f2ecb
update lockfile and respond to comments
karanataryn committed Feb 14, 2025
1 parent f4b3af9 commit b2f2ecb
Showing 3 changed files with 9 additions and 5 deletions.
lib/sycamore/poetry.lock (2 changes: 1 addition, 1 deletion)

Generated file; diff not rendered by default.

lib/sycamore/sycamore/llms/gemini.py (5 changes: 4 additions, 1 deletion)
@@ -44,7 +44,7 @@ class Gemini(LLM):
         cache: A cache object to use for caching results.
     """
 
-    @requires_modules("google-genai")
+    @requires_modules("google-genai", extra="google-genai")
     def __init__(
         self,
         model_name: Union[GeminiModels, str],
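
The extra argument presumably lets the import guard point users at the matching optional dependency group when google-genai is missing. A minimal sketch of such a guard, an assumption about how requires_modules might behave rather than Sycamore's actual implementation (the sycamore-ai package name in the hint is also an assumption):

    # Sketch of an import guard with an `extra` install hint; this is an
    # assumption, not Sycamore's real requires_modules.
    import functools
    import importlib

    def requires_modules(modules, extra=None):
        """Fail fast with an install hint if an optional dependency is missing."""
        if isinstance(modules, str):
            modules = [modules]

        def decorator(func):
            @functools.wraps(func)
            def wrapper(*args, **kwargs):
                for mod in modules:
                    try:
                        # The "google-genai" distribution imports as "google.genai".
                        importlib.import_module(mod.replace("-", "."))
                    except ImportError as err:
                        hint = f" Try: pip install 'sycamore-ai[{extra}]'" if extra else ""
                        raise ImportError(f"Missing optional module {mod!r}.{hint}") from err
                return func(*args, **kwargs)
            return wrapper
        return decorator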
@@ -82,6 +82,9 @@ def get_generate_kwargs(self, prompt: RenderedPrompt, llm_kwargs: Optional[dict]
             **(llm_kwargs or {}),
         }
         config["max_output_tokens"] = config.get("max_output_tokens", DEFAULT_MAX_TOKENS)
+        if prompt.response_format:
+            config["response_mime_type"] = "application/json"
+            config["response_schema"] = prompt.response_format
         content_list = []
         for message in prompt.messages:
             if message.role == "system":
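
To make the new branch concrete, here is a standalone sketch of how get_generate_kwargs now maps a prompt's response_format into the Gemini request config. The RenderedPrompt stand-in and the DEFAULT_MAX_TOKENS value are assumptions; the config field names come from the diff:

    # Standalone sketch of the response_format branch added above.
    from dataclasses import dataclass, field
    from typing import Any, Optional

    DEFAULT_MAX_TOKENS = 1024  # assumed value; the real constant lives in gemini.py

    @dataclass
    class RenderedPrompt:  # stand-in for Sycamore's RenderedPrompt
        messages: list = field(default_factory=list)
        response_format: Optional[Any] = None  # e.g. a Pydantic model or JSON schema

    def get_generate_kwargs(prompt: RenderedPrompt, llm_kwargs: Optional[dict] = None) -> dict:
        config: dict = {**(llm_kwargs or {})}
        config["max_output_tokens"] = config.get("max_output_tokens", DEFAULT_MAX_TOKENS)
        if prompt.response_format:
            # Constrain Gemini to emit JSON matching the supplied schema.
            config["response_mime_type"] = "application/json"
            config["response_schema"] = prompt.response_format
        return {"config": config}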
lib/sycamore/sycamore/transforms/summarize_images.py (7 changes: 4 additions, 3 deletions)
@@ -93,7 +93,7 @@ def summarize_image(self, image: Image.Image, context: Optional[str]) -> str:
             The summarized image as a string.
         """
         messages = []
-        if context is not None and self.include_context:
+        if context is not None:
             messages = [RenderedMessage(role="system", content=context)]
         messages.append(RenderedMessage(role="user", content=self.prompt, images=[image]))

@@ -141,11 +141,12 @@ class GeminiImageSummarizer(LLMImageSummarizer):
 
     def __init__(
        self,
-        gemini_model: Gemini,
+        gemini_model: Optional[Gemini] = None,
         prompt: Optional[str] = None,
         include_context: bool = True,
     ):
-
+        if gemini_model is None:
+            gemini_model = Gemini(model_name=self.model)
         super().__init__(llm=gemini_model, prompt=prompt, include_context=include_context)
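
With the default in place, the summarizer can be constructed without wiring up a Gemini client by hand. A hypothetical usage sketch (the import path is inferred from the diff; valid Google credentials and the google-genai extra are assumed):

    # Hypothetical usage after this change: no explicit Gemini instance needed.
    from sycamore.transforms.summarize_images import GeminiImageSummarizer

    # Falls back to Gemini(model_name=self.model), the class-level default model.
    summarizer = GeminiImageSummarizer()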


