Skip to content

Commit

Permalink
fix streaming with function calls - drop Python 3.8
Browse files Browse the repository at this point in the history
  • Loading branch information
anakin87 committed Oct 15, 2024
1 parent 518cf27 commit 0582d32
Show file tree
Hide file tree
Showing 2 changed files with 15 additions and 13 deletions.
3 changes: 1 addition & 2 deletions integrations/google_ai/pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -7,15 +7,14 @@ name = "google-ai-haystack"
dynamic = ["version"]
description = 'Use models like Gemini via Makersuite'
readme = "README.md"
requires-python = ">=3.8"
requires-python = ">=3.9"
license = "Apache-2.0"
keywords = []
authors = [{ name = "deepset GmbH", email = "[email protected]" }]
classifiers = [
"License :: OSI Approved :: Apache Software License",
"Development Status :: 4 - Beta",
"Programming Language :: Python",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3.10",
"Programming Language :: Python :: 3.11",
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -347,23 +347,26 @@ def _get_stream_response(
replies: List[ChatMessage] = []
for chunk in stream:
content: Union[str, Dict[str, Any]] = ""
metadata = chunk.to_dict() # we store whole chunk as metadata in streaming calls
for candidate in chunk.candidates:
for part in candidate.content.parts:
if part.text != "":
content = part.text
replies.append(ChatMessage(content=content, role=ChatRole.ASSISTANT, meta=metadata, name=None))
elif part.function_call is not None:
metadata["function_call"] = part.function_call
content = dict(part.function_call.args.items())
dict_chunk = chunk.to_dict() # we store whole chunk as metadata in streaming calls
for candidate in dict_chunk["candidates"]:
for part in candidate["content"]["parts"]:
if "text" in part and part["text"] != "":
content = part["text"]
replies.append(
ChatMessage(content=content, role=ChatRole.ASSISTANT, meta=dict_chunk, name=None)
)
elif "function_call" in part and len(part["function_call"]) > 0:
metadata = dict(dict_chunk)
metadata["function_call"] = part["function_call"]
content = part["function_call"]["args"]
replies.append(
ChatMessage(
content=content,
role=ChatRole.ASSISTANT,
name=part.function_call.name,
name=part["function_call"]["name"],
meta=metadata,
)
)

streaming_callback(StreamingChunk(content=content, meta=metadata))
streaming_callback(StreamingChunk(content=content, meta=dict_chunk))
return replies

0 comments on commit 0582d32

Please sign in to comment.