Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

OpenAI ErrorTrace attributes #941

Merged
merged 28 commits into from
Oct 31, 2023
Merged
Show file tree
Hide file tree
Changes from 6 commits
Commits
Show all changes
28 commits
Select commit Hold shift + click to select a range
dd29433
Add openai sync instrumentation.
umaannamalai Oct 10, 2023
d057663
Remove commented code.
umaannamalai Oct 10, 2023
4a681f0
Initial openai error commit
lrafeei Oct 17, 2023
48eb401
Merge branch 'develop-openai-instrumentation' into feature-openai-err…
lrafeei Oct 17, 2023
97cfc40
Merge branch 'develop-openai-instrumentation' into feature-openai-err…
lrafeei Oct 18, 2023
98a0911
Merge branch 'develop-openai-instrumentation' into feature-openai-err…
mergify[bot] Oct 19, 2023
c586492
Merge branch 'develop-openai-instrumentation' into feature-openai-err…
mergify[bot] Oct 20, 2023
dbea385
Merge branch 'develop-openai-instrumentation' into feature-openai-err…
mergify[bot] Oct 20, 2023
4de9da6
Add example tests and mock error responses
hmstepanek Oct 21, 2023
2854965
Changes to attribute collection
lrafeei Oct 23, 2023
73d3cef
Merge branch 'mock-openai-error-responses' into feature-openai-error-…
lrafeei Oct 23, 2023
683ab9d
Change error tests to match mock server
lrafeei Oct 23, 2023
e8ae2d4
[Mega-Linter] Apply linters fixes
lrafeei Oct 23, 2023
207c320
Trigger tests
lrafeei Oct 23, 2023
f770eb9
Merge branch 'develop-openai-instrumentation' into feature-openai-err…
mergify[bot] Oct 24, 2023
db1196a
Add dt_enabled decorator to error tests
lrafeei Oct 25, 2023
9444850
Add embedded and async error tests
lrafeei Oct 28, 2023
a5e3489
Merge branch 'develop-openai-instrumentation' into feature-openai-err…
lrafeei Oct 28, 2023
d6c1310
[Mega-Linter] Apply linters fixes
lrafeei Oct 28, 2023
aa61018
Trigger tests
lrafeei Oct 28, 2023
e825038
Add http.statusCode to span before notice_error call
lrafeei Oct 30, 2023
486d2db
Report number of messages in error trace even if 0
lrafeei Oct 30, 2023
3872c1e
Revert notice_error and add _nr_message attr
lrafeei Oct 30, 2023
f216d9f
Remove enabled_ml_settings as not needed
hmstepanek Oct 31, 2023
ea5ff91
Add stats engine _nr_message test
hmstepanek Oct 31, 2023
e2ab33c
[Mega-Linter] Apply linters fixes
hmstepanek Oct 31, 2023
baa3a89
Trigger tests
hmstepanek Oct 31, 2023
9beeae3
Revert black formatting in unicode/byte messages
lrafeei Oct 31, 2023
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
48 changes: 46 additions & 2 deletions newrelic/hooks/mlmodel_openai.py
Original file line number Diff line number Diff line change
Expand Up @@ -24,14 +24,53 @@
from newrelic.core.config import global_settings


def openai_error_attributes(exception, request_args):
    """Build the custom attribute dict attached to an OpenAI error trace.

    Parameters:
        exception: the exception raised by the openai client. openai.error.*
            exceptions expose ``http_status``, ``organization``, ``_message``
            and ``param``; any of these may be absent on other exceptions.
        request_args: the kwargs the user passed to the create() call.

    Returns:
        dict of attributes for ``transaction.notice_error``. Missing values
        are reported as the string sentinel "None" rather than Python None.
    """
    # Report only the last four digits of the API key, never the full key.
    api_key_LFD = "None"
    if openai.api_key:
        api_key_LFD = f"sk-{openai.api_key[-4:]}"

    # http_status is only set on exceptions raised after a response was
    # received; errors raised before the request (e.g. missing API key or
    # model) leave it at the "None" sentinel.
    status_code = getattr(exception, "http_status", "None")

    # Only a genuine HTTP error status (4xx/5xx) is reported as error_code.
    # The isinstance guard is required: status_code may be the "None"
    # sentinel string, and comparing str >= int raises TypeError.
    error_code = "None"
    if isinstance(status_code, int) and 400 <= status_code < 600:
        error_code = status_code

    error_attributes = {
        # 'id' (completion_id) is generated when the request completes; all
        # of these error types occur before a response exists, so it is
        # always "None" here.
        "id": "None",
        "api_key_last_four_digits": api_key_LFD,
        "request.model": request_args.get("model") or request_args.get("engine", "None"),
        "temperature": request_args.get("temperature", "None"),
        "max_tokens": request_args.get("max_tokens", "None"),
        "vendor": "openAI",
        "ingest_source": "Python",
        "organization": getattr(exception, "organization", "None"),
        # Reported even when 0 so the attribute is always present.
        "number_of_messages": len(request_args.get("messages", [])),
        "status_code": status_code,
        "error_message": getattr(exception, "_message", "None"),
        "error_type": exception.__class__.__name__ or "None",
        "error_code": error_code,
        "error_param": getattr(exception, "param", "None"),
    }
    return error_attributes


def wrap_embedding_create(wrapped, instance, args, kwargs):
transaction = current_transaction()
if not transaction:
return wrapped(*args, **kwargs)

ft_name = callable_name(wrapped)
with FunctionTrace(ft_name) as ft:
response = wrapped(*args, **kwargs)
try:
response = wrapped(*args, **kwargs)
except Exception as exc:
error_attributes = openai_error_attributes(exc, kwargs)
transaction.notice_error(attributes=error_attributes)
lrafeei marked this conversation as resolved.
Show resolved Hide resolved
lrafeei marked this conversation as resolved.
Show resolved Hide resolved
raise

if not response:
return response
Expand Down Expand Up @@ -98,7 +137,12 @@ def wrap_chat_completion_create(wrapped, instance, args, kwargs):

ft_name = callable_name(wrapped)
with FunctionTrace(ft_name) as ft:
response = wrapped(*args, **kwargs)
try:
response = wrapped(*args, **kwargs)
except Exception as exc:
error_attributes = openai_error_attributes(exc, kwargs)
transaction.notice_error(attributes=error_attributes)
raise

if not response:
return response
Expand Down
145 changes: 145 additions & 0 deletions tests/mlmodel_openai/test_error.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,145 @@
# Copyright 2010 New Relic, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import openai
import pytest
from testing_support.fixtures import override_application_settings
from testing_support.validators.validate_error_trace_attributes import (
validate_error_trace_attributes,
)

from newrelic.api.background_task import background_task
from newrelic.common.object_names import callable_name

enabled_ml_settings = {
"machine_learning.enabled": True,
"machine_learning.inference_events_value.enabled": True,
"ml_insights_events.enabled": True,
}

_test_openai_chat_completion_sync_messages = (
{"role": "system", "content": "You are a scientist."},
{"role": "user", "content": "What is 212 degrees Fahrenheit converted to Celsius?"},
)


# No model provided.
# NOTE: the exception is referenced via openai.error.* for consistency with
# the other tests in this file (openai.InvalidRequestError is the same
# re-exported class, so callable_name/pytest.raises behavior is unchanged).
@override_application_settings(enabled_ml_settings)
@validate_error_trace_attributes(
    callable_name(openai.error.InvalidRequestError),
    exact_attrs={
        "agent": {},
        "intrinsic": {},
        "user": {
            "id": "None",
            "api_key_last_four_digits": "sk-CRET",
            "request.model": "None",
            "temperature": 0.7,
            "max_tokens": 100,
            "vendor": "openAI",
            "ingest_source": "Python",
            "organization": "None",
            "number_of_messages": 2,
            "status_code": "None",
            "error_message": "Must provide an 'engine' or 'model' parameter to create a <class 'openai.api_resources.chat_completion.ChatCompletion'>",
            "error_type": "InvalidRequestError",
            "error_code": "None",
            "error_param": "engine",
        },
    },
)
@background_task()
def test_invalid_request_error():
    """Omitting the model records an InvalidRequestError with full attrs."""
    with pytest.raises(openai.error.InvalidRequestError):
        openai.ChatCompletion.create(
            # model intentionally omitted to trigger the error
            messages=_test_openai_chat_completion_sync_messages,
            temperature=0.7,
            max_tokens=100,
        )


# No api_key provided.
@override_application_settings(enabled_ml_settings)
@validate_error_trace_attributes(
    callable_name(openai.error.AuthenticationError),
    exact_attrs={
        "agent": {},
        "intrinsic": {},
        "user": {
            "id": "None",
            "api_key_last_four_digits": "None",
            "request.model": "gpt-3.5-turbo",
            "temperature": 0.7,
            "max_tokens": 100,
            "vendor": "openAI",
            "ingest_source": "Python",
            "organization": "None",
            "number_of_messages": 2,
            "status_code": "None",
            "error_message": "No API key provided. You can set your API key in code using 'openai.api_key = <API-KEY>', or you can set the environment variable OPENAI_API_KEY=<API-KEY>). If your API key is stored in a file, you can point the openai module at it with 'openai.api_key_pa",
            "error_type": "AuthenticationError",
            "error_code": "None",
            "error_param": "None",
        },
    },
)
@background_task()
def test_authentication_error(monkeypatch):
    """Clearing the API key records an AuthenticationError with full attrs."""
    # Clear the key so the client raises before any request is sent;
    # monkeypatch restores the original value at teardown.
    monkeypatch.setattr(openai, "api_key", None)
    with pytest.raises(openai.error.AuthenticationError):
        openai.ChatCompletion.create(
            model="gpt-3.5-turbo",
            messages=_test_openai_chat_completion_sync_messages,
            temperature=0.7,
            max_tokens=100,
        )

# Incorrect URL provided (404 error).
@override_application_settings(enabled_ml_settings)
@validate_error_trace_attributes(
    callable_name(openai.error.APIError),
    exact_attrs={
        "agent": {},
        "intrinsic": {},
        "user": {
            "id": "None",
            "api_key_last_four_digits": "sk-CRET",
            "request.model": "gpt-3.5-turbo",
            "temperature": 0.7,
            "max_tokens": 100,
            "vendor": "openAI",
            "ingest_source": "Python",
            "organization": "None",
            "number_of_messages": 2,
            "status_code": 404,
            "error_message": 'HTTP code 404 from API (<!DOCTYPE HTML PUBLIC "-//IETF//DTD HTML 2.0//EN">\n<html><head>\n<title>404 Not Found</title>\n</head><body>\n<h1>Not Found</h1>\n<p>The requested URL was not found on this server.</p>\n<hr>\n<address>Apache/2.4.25 (Debian) Server at thi',
            "error_type": "APIError",
            "error_code": 404,
            "error_param": "None",
        },
    },
)
@background_task()
def test_api_error(monkeypatch):
    """A 404 from the API base URL records an APIError with full attrs."""
    # Use monkeypatch (not a bare assignment) so openai.api_base is restored
    # at teardown — otherwise every later test in the session would hit the
    # bogus URL.
    monkeypatch.setattr(openai, "api_base", "http://thisisnotarealurl.com/")
    with pytest.raises(openai.error.APIError):
        openai.ChatCompletion.create(
            model="gpt-3.5-turbo",
            messages=_test_openai_chat_completion_sync_messages,
            temperature=0.7,
            max_tokens=100,
        )


# Timeout is raised by requests.exceptions.Timeout
# APIConnectionError is raised by requests.exceptions.RequestException
Loading