Support for file contents in simulated messages #34726

Merged 15 commits on Mar 12, 2024
@@ -2,6 +2,7 @@
# ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
# pylint: skip-file
"""Embeddings generation and management tools."""
import contextlib
import copy
@@ -1,6 +1,7 @@
# ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
# pylint: disable=W0125
"""Logging utilities."""
import inspect
import logging
@@ -1,6 +1,7 @@
# ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
#pylint: skip-file
import copy
from typing import List, Tuple

@@ -29,8 +30,22 @@ async def generate_response(
self.user_template, conversation_history, self.user_template_parameters
)
msg_copy = copy.deepcopy(chat_protocol_message)
result = await self.callback(msg_copy)

result = {}
try:
result = await self.callback(msg_copy)
except Exception as exc:
if "status_code" in dir(exc) and 400 <= exc.status_code < 500 and "response was filtered" in exc.message:
result = {
"messages": [{
"content": ("Error: The response was filtered due to the prompt "
"triggering Azure OpenAI's content management policy. "
"Please modify your prompt and retry."),
"role": "assistant"
}],
"finish_reason": ["stop"],
"id": None,
"template_parameters": {}
}
self.logger.info("Using user provided callback returning response.")

time_taken = 0
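
For context, here is a minimal, self-contained sketch of the fallback behavior added in the hunk above: if the user-provided callback raises a 4xx error whose message says the response was filtered, the simulator substitutes a canned assistant reply and carries on; any other failure leaves the result empty, exactly as in the diff. The ContentFilterError class and the call_with_fallback name are hypothetical stand-ins for illustration, not part of the SDK.

import asyncio

class ContentFilterError(Exception):
    """Hypothetical stand-in; the real code only inspects status_code and message."""
    def __init__(self, status_code: int, message: str):
        super().__init__(message)
        self.status_code = status_code
        self.message = message

FILTERED_FALLBACK = {
    "messages": [{
        "content": ("Error: The response was filtered due to the prompt "
                    "triggering Azure OpenAI's content management policy. "
                    "Please modify your prompt and retry."),
        "role": "assistant",
    }],
    "finish_reason": ["stop"],
    "id": None,
    "template_parameters": {},
}

async def call_with_fallback(callback, payload):
    # Mirrors the diff: start with an empty result, swallow the exception, and
    # substitute the canned reply only for 4xx "response was filtered" errors.
    result = {}
    try:
        result = await callback(payload)
    except Exception as exc:
        if "status_code" in dir(exc) and 400 <= exc.status_code < 500 and "response was filtered" in exc.message:
            result = FILTERED_FALLBACK
    return result

async def _demo():
    async def failing_callback(_msg):
        raise ContentFilterError(400, "The response was filtered by the content management policy.")
    result = await call_with_fallback(failing_callback, {"messages": []})
    print(result["messages"][0]["role"])  # prints: assistant

asyncio.run(_demo())
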
@@ -54,6 +69,9 @@ def _to_chat_protocol(self, template, conversation_history, template_parameters)
for _, m in enumerate(conversation_history):
messages.append({"content": m.message, "role": m.role.value})

if template_parameters.get("file_content", None) and any('File contents:' not in message['content'] for message in messages):
messages.append({"content": f"File contents: {template_parameters['file_content']}", "role": "user"})

return {
"template_parameters": template_parameters,
"messages": messages,
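
A second self-contained sketch (the build_payload name is hypothetical) of the chat-protocol payload that _to_chat_protocol now produces when the template parameters carry a "file_content" value; the membership check mirrors the condition as written in the diff.

from typing import Any, Dict, List

def build_payload(conversation_history: List[Dict[str, str]],
                  template_parameters: Dict[str, Any]) -> Dict[str, Any]:
    # Copy prior turns into the protocol's message list.
    messages = [{"content": m["content"], "role": m["role"]} for m in conversation_history]
    # New in this PR: append the simulated file contents as an extra user turn,
    # guarded by the same check used in the diff above.
    file_content = template_parameters.get("file_content", None)
    if file_content and any("File contents:" not in m["content"] for m in messages):
        messages.append({"content": f"File contents: {file_content}", "role": "user"})
    return {"template_parameters": template_parameters, "messages": messages}

payload = build_payload(
    [{"content": "Summarize the attached report.", "role": "user"}],
    {"file_content": "Q1 revenue grew 12% year over year."},
)
print(payload["messages"][-1])  # {'content': 'File contents: Q1 revenue ...', 'role': 'user'}
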
@@ -194,7 +194,7 @@ def _join_conversation_starter(self, parameters, to_join):
async def simulate_async(
self,
template: "Template",
max_conversation_turns: int,
max_conversation_turns: int = 2,
parameters: Optional[List[dict]] = None,
jailbreak: bool = False,
api_call_retry_limit: int = 3,
@@ -208,6 +208,7 @@ async def simulate_async(
:keyword template: An instance of the Template class defining the conversation structure.
:paramtype template: Template
:keyword max_conversation_turns: The maximum number of conversation turns to simulate.
Defaults to 2; values other than the default only take effect for conversation (chat) templates.
:paramtype max_conversation_turns: int
:keyword parameters: A list of dictionaries containing the parameter values to be used in the simulations.
Defaults to an empty list.
@@ -239,7 +240,8 @@ async def simulate_async(

if not isinstance(parameters, list):
raise ValueError(f"Expect parameters to be a list of dictionary, but found {type(parameters)}")

if "conversation" not in template.template_name:
max_conversation_turns = 2
if template.content_harm:
self._ensure_service_dependencies()
self.adversarial = True
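
A hedged usage sketch of the updated signature, assuming a simulator instance and template obtained elsewhere through the azure-ai-generative synthetic simulation APIs; the argument names follow the signature shown above, while the surrounding setup is omitted, and the "file_content" parameter usage is an assumption based on this PR.

import asyncio

async def run_simulation(simulator, template, parameters):
    # max_conversation_turns now defaults to 2; per the change above, any other
    # value only takes effect for templates whose name contains "conversation".
    return await simulator.simulate_async(
        template=template,
        parameters=parameters,          # each dict may include "file_content"
        max_conversation_turns=4,       # honored only for conversation templates
        api_call_retry_limit=3,
    )

# Example invocation (simulator, template and parameters come from your own setup):
# results = asyncio.run(run_simulation(simulator, template, [{"file_content": "..."}]))
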
sdk/ai/azure-ai-generative/pyproject.toml (2 changes: 1 addition & 1 deletion)
@@ -13,7 +13,7 @@ strict_sphinx = true

[tool.mypy]
python_version = "3.10"
exclude = ["azure/ai/generative/index/_langchain/vendor", "tests", "setup.py", "samples", "azure/ai/generative/evaluate/pf_templates/built_in_metrics"]
exclude = ["azure/ai/generative/index/_langchain/vendor", "tests", "setup.py", "samples", "azure/ai/generative/evaluate/pf_templates/built_in_metrics", "azure/ai/generative/synthetic"]
warn_unused_configs = true
follow_imports = "skip"
ignore_missing_imports = true