Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

OpenAI ErrorTrace attributes #941

Merged
merged 28 commits into from
Oct 31, 2023
Merged
Show file tree
Hide file tree
Changes from 25 commits
Commits
Show all changes
28 commits
Select commit Hold shift + click to select a range
dd29433
Add openai sync instrumentation.
umaannamalai Oct 10, 2023
d057663
Remove commented code.
umaannamalai Oct 10, 2023
4a681f0
Initial openai error commit
lrafeei Oct 17, 2023
48eb401
Merge branch 'develop-openai-instrumentation' into feature-openai-err…
lrafeei Oct 17, 2023
97cfc40
Merge branch 'develop-openai-instrumentation' into feature-openai-err…
lrafeei Oct 18, 2023
98a0911
Merge branch 'develop-openai-instrumentation' into feature-openai-err…
mergify[bot] Oct 19, 2023
c586492
Merge branch 'develop-openai-instrumentation' into feature-openai-err…
mergify[bot] Oct 20, 2023
dbea385
Merge branch 'develop-openai-instrumentation' into feature-openai-err…
mergify[bot] Oct 20, 2023
4de9da6
Add example tests and mock error responses
hmstepanek Oct 21, 2023
2854965
Changes to attribute collection
lrafeei Oct 23, 2023
73d3cef
Merge branch 'mock-openai-error-responses' into feature-openai-error-…
lrafeei Oct 23, 2023
683ab9d
Change error tests to match mock server
lrafeei Oct 23, 2023
e8ae2d4
[Mega-Linter] Apply linters fixes
lrafeei Oct 23, 2023
207c320
Trigger tests
lrafeei Oct 23, 2023
f770eb9
Merge branch 'develop-openai-instrumentation' into feature-openai-err…
mergify[bot] Oct 24, 2023
db1196a
Add dt_enabled decorator to error tests
lrafeei Oct 25, 2023
9444850
Add embedded and async error tests
lrafeei Oct 28, 2023
a5e3489
Merge branch 'develop-openai-instrumentation' into feature-openai-err…
lrafeei Oct 28, 2023
d6c1310
[Mega-Linter] Apply linters fixes
lrafeei Oct 28, 2023
aa61018
Trigger tests
lrafeei Oct 28, 2023
e825038
Add http.statusCode to span before notice_error call
lrafeei Oct 30, 2023
486d2db
Report number of messages in error trace even if 0
lrafeei Oct 30, 2023
3872c1e
Revert notice_error and add _nr_message attr
lrafeei Oct 30, 2023
f216d9f
Remove enabled_ml_settings as not needed
hmstepanek Oct 31, 2023
ea5ff91
Add stats engine _nr_message test
hmstepanek Oct 31, 2023
e2ab33c
[Mega-Linter] Apply linters fixes
hmstepanek Oct 31, 2023
baa3a89
Trigger tests
hmstepanek Oct 31, 2023
9beeae3
Revert black formatting in unicode/byte messages
lrafeei Oct 31, 2023
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
43 changes: 27 additions & 16 deletions newrelic/api/time_trace.py
Original file line number Diff line number Diff line change
Expand Up @@ -29,7 +29,6 @@
)
from newrelic.core.config import is_expected_error, should_ignore_error
from newrelic.core.trace_cache import trace_cache

from newrelic.packages import six

_logger = logging.getLogger(__name__)
Expand Down Expand Up @@ -260,6 +259,11 @@ def _observe_exception(self, exc_info=None, ignore=None, expected=None, status_c
module, name, fullnames, message_raw = parse_exc_info((exc, value, tb))
fullname = fullnames[0]

# In case message is in JSON format for OpenAI models
# this will result in a "cleaner" message format
if getattr(value, "_nr_message", None):
message_raw = value._nr_message

# Check to see if we need to strip the message before recording it.

if settings.strip_exception_messages.enabled and fullname not in settings.strip_exception_messages.allowlist:
Expand Down Expand Up @@ -422,23 +426,32 @@ def notice_error(self, error=None, attributes=None, expected=None, ignore=None,
input_attributes = {}
input_attributes.update(transaction._custom_params)
input_attributes.update(attributes)
error_group_name_raw = settings.error_collector.error_group_callback(value, {
"traceback": tb,
"error.class": exc,
"error.message": message_raw,
"error.expected": is_expected,
"custom_params": input_attributes,
"transactionName": getattr(transaction, "name", None),
"response.status": getattr(transaction, "_response_code", None),
"request.method": getattr(transaction, "_request_method", None),
"request.uri": getattr(transaction, "_request_uri", None),
})
error_group_name_raw = settings.error_collector.error_group_callback(
value,
{
"traceback": tb,
"error.class": exc,
"error.message": message_raw,
"error.expected": is_expected,
"custom_params": input_attributes,
"transactionName": getattr(transaction, "name", None),
"response.status": getattr(transaction, "_response_code", None),
"request.method": getattr(transaction, "_request_method", None),
"request.uri": getattr(transaction, "_request_uri", None),
},
)
if error_group_name_raw:
_, error_group_name = process_user_attribute("error.group.name", error_group_name_raw)
if error_group_name is None or not isinstance(error_group_name, six.string_types):
raise ValueError("Invalid attribute value for error.group.name. Expected string, got: %s" % repr(error_group_name_raw))
raise ValueError(
"Invalid attribute value for error.group.name. Expected string, got: %s"
% repr(error_group_name_raw)
)
except Exception:
_logger.error("Encountered error when calling error group callback:\n%s", "".join(traceback.format_exception(*sys.exc_info())))
_logger.error(
"Encountered error when calling error group callback:\n%s",
"".join(traceback.format_exception(*sys.exc_info())),
)
error_group_name = None

transaction._create_error_node(
Expand Down Expand Up @@ -595,13 +608,11 @@ def update_async_exclusive_time(self, min_child_start_time, exclusive_duration):
def process_child(self, node, is_async):
self.children.append(node)
if is_async:

# record the lowest start time
self.min_child_start_time = min(self.min_child_start_time, node.start_time)

# if there are no children running, finalize exclusive time
if self.child_count == len(self.children):

exclusive_duration = node.end_time - self.min_child_start_time

self.update_async_exclusive_time(self.min_child_start_time, exclusive_duration)
Expand Down
5 changes: 5 additions & 0 deletions newrelic/core/stats_engine.py
Original file line number Diff line number Diff line change
Expand Up @@ -724,6 +724,11 @@ def notice_error(self, error=None, attributes=None, expected=None, ignore=None,
module, name, fullnames, message_raw = parse_exc_info(error)
fullname = fullnames[0]

# In the case of JSON formatting for OpenAI models
# this will result in a "cleaner" message format
if getattr(value, "_nr_message", None):
message_raw = value._nr_message

# Check to see if we need to strip the message before recording it.

if settings.strip_exception_messages.enabled and fullname not in settings.strip_exception_messages.allowlist:
Expand Down
62 changes: 58 additions & 4 deletions newrelic/hooks/mlmodel_openai.py
Original file line number Diff line number Diff line change
Expand Up @@ -27,6 +27,28 @@
OPENAI_VERSION = get_package_version("openai")


def openai_error_attributes(exception, request_args):
    """Build the attribute dict recorded on an OpenAI error trace.

    Parameters
    ----------
    exception : Exception
        The exception raised by the OpenAI client. Attributes such as
        ``http_status``, ``organization``, ``_message``, ``param`` and a
        nested ``error`` object are read defensively with ``getattr`` so
        any exception type is accepted.
    request_args : dict
        The keyword arguments passed to the OpenAI API call (``model``,
        ``engine``, ``messages``, ``temperature``, ``max_tokens``, ...).

    Returns
    -------
    dict
        Flat mapping of agent attribute names to values; missing data is
        reported as an empty string so every key is always present.
    """
    api_key = getattr(openai, "api_key", None)
    # Report only the last four characters of the key (prefixed to resemble
    # the key format) so the full secret never leaves the process.
    api_key_last_four_digits = f"sk-{api_key[-4:]}" if api_key else ""
    # ``or []`` guards against an explicit ``messages=None`` argument in
    # addition to a missing key; the count is reported even when it is 0.
    number_of_messages = len(request_args.get("messages") or [])

    error_attributes = {
        "api_key_last_four_digits": api_key_last_four_digits,
        "request.model": request_args.get("model") or request_args.get("engine") or "",
        "request.temperature": request_args.get("temperature", ""),
        "request.max_tokens": request_args.get("max_tokens", ""),
        "vendor": "openAI",
        "ingest_source": "Python",
        "response.organization": getattr(exception, "organization", ""),
        "response.number_of_messages": number_of_messages,
        "http.statusCode": getattr(exception, "http_status", ""),
        "error.message": getattr(exception, "_message", ""),
        # The nested getattr defaults to "" when no ``error`` object exists,
        # and ``getattr("", "code", "")`` then safely yields "" as well.
        "error.code": getattr(getattr(exception, "error", ""), "code", ""),
        "error.param": getattr(exception, "param", ""),
    }
    return error_attributes


def wrap_embedding_create(wrapped, instance, args, kwargs):
transaction = current_transaction()
if not transaction:
Expand All @@ -36,7 +58,15 @@ def wrap_embedding_create(wrapped, instance, args, kwargs):

ft_name = callable_name(wrapped)
with FunctionTrace(ft_name) as ft:
response = wrapped(*args, **kwargs)
try:
response = wrapped(*args, **kwargs)
except Exception as exc:
error_attributes = openai_error_attributes(exc, kwargs)
exc._nr_message = error_attributes.pop("error.message")
ft.notice_error(
attributes=error_attributes,
)
raise

if not response:
return response
Expand Down Expand Up @@ -105,7 +135,15 @@ def wrap_chat_completion_create(wrapped, instance, args, kwargs):

ft_name = callable_name(wrapped)
with FunctionTrace(ft_name) as ft:
response = wrapped(*args, **kwargs)
try:
response = wrapped(*args, **kwargs)
except Exception as exc:
error_attributes = openai_error_attributes(exc, kwargs)
exc._nr_message = error_attributes.pop("error.message")
ft.notice_error(
attributes=error_attributes,
)
raise

if not response:
return response
Expand Down Expand Up @@ -250,7 +288,15 @@ async def wrap_embedding_acreate(wrapped, instance, args, kwargs):

ft_name = callable_name(wrapped)
with FunctionTrace(ft_name) as ft:
response = await wrapped(*args, **kwargs)
try:
response = await wrapped(*args, **kwargs)
except Exception as exc:
error_attributes = openai_error_attributes(exc, kwargs)
exc._nr_message = error_attributes.pop("error.message")
ft.notice_error(
attributes=error_attributes,
)
raise

if not response:
return response
Expand Down Expand Up @@ -323,7 +369,15 @@ async def wrap_chat_completion_acreate(wrapped, instance, args, kwargs):

ft_name = callable_name(wrapped)
with FunctionTrace(ft_name) as ft:
response = await wrapped(*args, **kwargs)
try:
response = await wrapped(*args, **kwargs)
except Exception as exc:
error_attributes = openai_error_attributes(exc, kwargs)
exc._nr_message = error_attributes.pop("error.message")
ft.notice_error(
attributes=error_attributes,
)
raise

if not response:
return response
Expand Down
Loading
Loading