diff --git a/integrations/google_ai/pyproject.toml b/integrations/google_ai/pyproject.toml
index d06e0a53f..88fbcd61c 100644
--- a/integrations/google_ai/pyproject.toml
+++ b/integrations/google_ai/pyproject.toml
@@ -7,7 +7,7 @@ name = "google-ai-haystack"
 dynamic = ["version"]
 description = 'Use models like Gemini via Makersuite'
 readme = "README.md"
-requires-python = ">=3.8"
+requires-python = ">=3.9"
 license = "Apache-2.0"
 keywords = []
 authors = [{ name = "deepset GmbH", email = "info@deepset.ai" }]
@@ -15,7 +15,6 @@ classifiers = [
   "License :: OSI Approved :: Apache Software License",
   "Development Status :: 4 - Beta",
   "Programming Language :: Python",
-  "Programming Language :: Python :: 3.8",
   "Programming Language :: Python :: 3.9",
   "Programming Language :: Python :: 3.10",
   "Programming Language :: Python :: 3.11",
diff --git a/integrations/google_ai/src/haystack_integrations/components/generators/google_ai/chat/gemini.py b/integrations/google_ai/src/haystack_integrations/components/generators/google_ai/chat/gemini.py
index 56c84968b..af54459ca 100644
--- a/integrations/google_ai/src/haystack_integrations/components/generators/google_ai/chat/gemini.py
+++ b/integrations/google_ai/src/haystack_integrations/components/generators/google_ai/chat/gemini.py
@@ -347,23 +347,26 @@ def _get_stream_response(
     replies: List[ChatMessage] = []
     for chunk in stream:
         content: Union[str, Dict[str, Any]] = ""
-        metadata = chunk.to_dict()  # we store whole chunk as metadata in streaming calls
-        for candidate in chunk.candidates:
-            for part in candidate.content.parts:
-                if part.text != "":
-                    content = part.text
-                    replies.append(ChatMessage(content=content, role=ChatRole.ASSISTANT, meta=metadata, name=None))
-                elif part.function_call is not None:
-                    metadata["function_call"] = part.function_call
-                    content = dict(part.function_call.args.items())
+        dict_chunk = chunk.to_dict()  # we store whole chunk as metadata in streaming calls
+        for candidate in dict_chunk["candidates"]:
+            for part in candidate["content"]["parts"]:
+                if "text" in part and part["text"] != "":
+                    content = part["text"]
+                    replies.append(
+                        ChatMessage(content=content, role=ChatRole.ASSISTANT, meta=dict_chunk, name=None)
+                    )
+                elif "function_call" in part and len(part["function_call"]) > 0:
+                    metadata = dict(dict_chunk)
+                    metadata["function_call"] = part["function_call"]
+                    content = part["function_call"]["args"]
                     replies.append(
                         ChatMessage(
                             content=content,
                             role=ChatRole.ASSISTANT,
-                            name=part.function_call.name,
+                            name=part["function_call"]["name"],
                             meta=metadata,
                         )
                     )
-        streaming_callback(StreamingChunk(content=content, meta=metadata))
+        streaming_callback(StreamingChunk(content=content, meta=dict_chunk))
     return replies
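
Note on the gemini.py change: the streamed chunk is now read through chunk.to_dict() instead of proto attribute access. On the dict form, absent fields are simply missing keys, so membership tests ("text" in part, len(part["function_call"]) > 0) can distinguish text parts from function-call parts; the replaced attribute checks (part.text != "", part.function_call is not None) presumably could not, since proto fields are present with default values even when unset. The sketch below illustrates the dict-based parsing on hand-made chunk payloads; the payload values (get_weather, city) are made up for illustration, and only the shape follows the to_dict() output as it is used in the diff.

    from typing import Any, Dict, List

    # Hypothetical payloads shaped like `chunk.to_dict()` output: one text part
    # and one function-call part. Values are invented; the keys mirror the diff.
    dict_chunks: List[Dict[str, Any]] = [
        {"candidates": [{"content": {"parts": [{"text": "Checking the weather..."}]}}]},
        {
            "candidates": [
                {
                    "content": {
                        "parts": [
                            {"function_call": {"name": "get_weather", "args": {"city": "Paris"}}}
                        ]
                    }
                }
            ]
        },
    ]

    for dict_chunk in dict_chunks:
        for candidate in dict_chunk["candidates"]:
            for part in candidate["content"]["parts"]:
                if "text" in part and part["text"] != "":
                    # Text parts carry the streamed text directly.
                    print("text part:", part["text"])
                elif "function_call" in part and len(part["function_call"]) > 0:
                    # Absent fields are missing keys in the dict form, so
                    # membership tests replace the old `is not None` checks.
                    print("function call:", part["function_call"]["name"], part["function_call"]["args"])

In the diff itself, dict_chunk is also what gets attached as meta on each ChatMessage and StreamingChunk, and the function-call branch copies it first (metadata = dict(dict_chunk)) so the added "function_call" key does not leak into the metadata attached to sibling parts.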