This repository has been archived by the owner on Oct 28, 2024. It is now read-only.

feat: update the attribute references #1496

Merged · 2 commits · Jan 31, 2022
Changes from 1 commit
114 changes: 92 additions & 22 deletions resources/scripts/pytest_otel/src/pytest_otel/__init__.py
@@ -8,6 +8,7 @@

import _pytest._code
import _pytest.skipping
from _pytest import timing
import pytest
from opentelemetry import trace
from opentelemetry.exporter.otlp.proto.grpc.trace_exporter import OTLPSpanExporter
@@ -18,6 +19,12 @@
from opentelemetry.trace.propagation.tracecontext import TraceContextTextMapPropagator
from opentelemetry.trace.status import Status, StatusCode

# from opentelemetry import metrics
Review comment from the PR author on the commented-out metrics imports:

The metrics API is only available in versions >1.7.0, but we cannot upgrade yet because that version has a bug in how it handles headers, and that bug breaks authentication with headers.
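For reference, a minimal sketch of what these counters could look like once the plugin can move to an SDK release that ships the metrics API (counter names mirror the commented-out code below; the ConsoleMetricExporter wiring and the 5-second export interval are assumptions for illustration, not part of this PR):

# Sketch only: assumes a newer opentelemetry-sdk that includes the stable metrics API.
from opentelemetry import metrics
from opentelemetry.sdk.metrics import MeterProvider
from opentelemetry.sdk.metrics.export import ConsoleMetricExporter, PeriodicExportingMetricReader

# Push metrics every 5 seconds; in practice an OTLP metric exporter would replace the console one.
reader = PeriodicExportingMetricReader(ConsoleMetricExporter(), export_interval_millis=5000)
metrics.set_meter_provider(MeterProvider(metric_readers=[reader]))
meter = metrics.get_meter("pytest_otel")

errors_counter = meter.create_counter("tests.error", unit="1", description="number of error tests")
failed_counter = meter.create_counter("tests.failed", unit="1", description="number of failed tests")
skipped_counter = meter.create_counter("tests.skipped", unit="1", description="number of skipped tests")
total_counter = meter.create_counter("tests.total", unit="1", description="total number of tests")

total_counter.add(1)  # e.g. incremented once per test run in pytest_runtest_call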

# from opentelemetry.sdk.metrics.export import ConsoleMetricsExporter
# from opentelemetry.sdk.metrics.export.controller import PushController
# from opentelemetry.ext.otcollector.metrics_exporter import CollectorMetricsExporter
# from opentelemetry.sdk.metrics import Counter, MeterProvider

LOGGER = logging.getLogger("pytest_otel")
service_name = None
traceparent = None
@@ -29,7 +36,11 @@
otel_exporter = None
spans = {}
outcome = None

# errors_counter = None
# failed_counter = None
# skipped_counter = None
# total_counter = None
# controller = None

def pytest_addoption(parser):
"""Init command line arguments"""
@@ -83,7 +94,7 @@ def pytest_addoption(parser):

def init_otel():
"""Init the OpenTelemetry settings"""
global tracer, session_name, service_name, insecure, otel_exporter
global tracer, session_name, service_name, insecure, otel_exporter, errors_counter, failed_counter, skipped_counter, total_counter, controller # noqa: E501
LOGGER.debug("Init Otel : {}".format(service_name))
trace.set_tracer_provider(
TracerProvider(
@@ -94,13 +105,43 @@ def init_otel():
if in_memory_span_exporter:
otel_exporter = InMemorySpanExporter()
trace.get_tracer_provider().add_span_processor(SimpleSpanProcessor(otel_exporter))
otel_exporter.clear()
# metrics_exporter = ConsoleMetricsExporter()
else:
otel_exporter = OTLPSpanExporter()
trace.get_tracer_provider().add_span_processor(BatchSpanProcessor(otel_exporter))
# metrics_exporter = CollectorMetricsExporter()

tracer = trace.get_tracer(session_name)

# metrics.set_meter_provider(MeterProvider())
# meter = metrics.get_meter(session_name, True)
# controller = PushController(meter, metrics_exporter, 5)
#
# errors_counter = meter.create_counter(
# name="tests.error",
# description="number of error tests",
# unit="1",
# value_type=int
# )
# failed_counter = meter.create_counter(
# name="tests.failed",
# description="number of failed tests",
# unit="1",
# value_type=int
# )
# skipped_counter = meter.create_counter(
# name="tests.skipped",
# description="number of skipped tests",
# unit="1",
# value_type=int
# )
# total_counter = meter.create_counter(
# name="tests.total",
# description="total number of tests",
# unit="1",
# value_type=int
# )


def start_span(span_name, context=None, kind=None):
"""Starts a span with the name, context, and kind passed as parameters"""
@@ -118,7 +159,7 @@ def end_span(span_name, outcome):
global spans
status = convertOutcome(outcome)
spans[span_name].set_status(status)
spans[span_name].set_attribute("test.outcome", outcome)
spans[span_name].set_attribute("tests.status", outcome)
spans[span_name].end()
LOGGER.debug("The {} transaction ends. -> {}".format(span_name, status))
return spans[span_name]
@@ -138,6 +179,19 @@ def convertOutcome(outcome):
else:
return Status(status_code=StatusCode.UNSET)

# def update_metrics(outcome):
# """Update the metrics with the test result"""
# if (outcome == "interrupted"
# or outcome == "internal_error"
# or outcome == "usage_error"
# or outcome == "no_tests_collected"
# ):
# errors_counter.add(1)
# elif (outcome == "failed"):
# failed_counter.add(1)
# elif (outcome == "skipped"):
# skipped_counter.add(1)


def exitCodeToOutcome(exit_code):
"""convert pytest ExitCode to outcome"""
@@ -235,8 +289,9 @@ def pytest_runtest_call(item):
record_exception=True,
set_status_on_exception=True,
) as span:
#total_counter.add(1)
LOGGER.debug("Test {} starts - {}".format(item.name, span.get_span_context()))
span.set_attribute("test.name", item.name)
span.set_attribute("tests.name", item.name)
yield
LOGGER.debug("Test {} ends - {}".format(item.name, span.get_span_context()))

@@ -257,26 +312,41 @@ def pytest_runtest_call(item):
outcome = "failed"
style = item.config.getoption("tbstyle", "auto")
longrepr = item._repr_failure_py(last_value, style=style)
LOGGER.debug("test.outcome {}".format(outcome))
LOGGER.debug("test.longrepr {}".format(longrepr))
LOGGER.debug("test.last_value {}".format(last_value))

stack_trace = repr(traceback.format_exception(last_type, last_value, last_traceback))
span.set_attribute("test.stack_trace", "{}".format(stack_trace))
span.set_attribute("tests.error", "{}".format(stack_trace))
if hasattr(last_value, "args") and len(getattr(last_value, 'args', [])) > 0:
span.set_attribute("test.error", "{}"
span.set_attribute("tests.message", "{}"
.format(last_value.args[0]))

span.set_attribute("test.last_value", "{}"
.format(getattr(sys, 'last_value')))
span.set_attribute("test.last_type", "{}"
.format(getattr(sys, 'last_type')))

if hasattr(_pytest, 'skipping'):
skipping = _pytest.skipping
xfailed = item._store.get(getattr(skipping, 'xfailed_key', None), None)
span.set_attribute("test.xfailed", "{}"
.format(getattr(xfailed, 'reason', None)))

if longrepr:
span.set_attribute("tests.message", "{}".format(longrepr))
elif last_value:
span.set_attribute("tests.message", "{}".format(last_value))
elif last_type:
span.set_attribute("tests.message", "{}".format(last_type))

skipping = getattr(_pytest, 'skipping', None)
if skipping:
key = getattr(skipping, 'xfailed_key', None)
xfailed = item._store.get(key, None)
reason = getattr(xfailed, 'reason', None)
if reason:
span.set_attribute("tests.message", "{}".format(reason))

#update_metrics(outcome)
status = convertOutcome(outcome)
span.set_status(status)
span.set_attribute("test.outcome", "{}".format(outcome))
span.set_attribute("tests.status", "{}".format(outcome))


@pytest.hookimpl()
def pytest_runtest_logreport(report):
global session_name, spans
test_name = report.nodeid.split("::")[0]

if report.failed and report.when == "teardown":
span = spans[test_name]
span.set_attribute("tests.systemerr", report.capstderr)
span.set_attribute("tests.systemout", report.capstdout)
span.set_attribute("tests.duration", getattr(report, "duration", 0.0))
@@ -20,7 +20,7 @@ def test_xfail_plugin(pytester, otel_service):
pytester.makepyfile(
common_code
+ """
@pytest.mark.xfail
@pytest.mark.xfail(reason="foo bug")
def test_xfail():
assert False
""")
8 changes: 4 additions & 4 deletions resources/scripts/pytest_otel/tests/it/utils/__init__.py
@@ -50,7 +50,7 @@ def assertTestSuit(span, outcome, status):
assert span["kind"] == "SPAN_KIND_SERVER"
assert span["status"]["code"] == status
if outcome is not None:
assertAttrKeyValue(span["attributes"], 'test.outcome', outcome)
assertAttrKeyValue(span["attributes"], 'tests.status', outcome)
assert len(span["parentSpanId"]) == 0
return True

@@ -59,16 +59,16 @@ def assertSpan(span, name, outcome, status):
"""check attributes of a span"""
assert span["kind"] == "SPAN_KIND_INTERNAL"
assert span["status"]["code"] == status
assertAttrKeyValue(span["attributes"], 'test.name', name)
assertAttrKeyValue(span["attributes"], 'tests.name', name)
if outcome is not None:
assertAttrKeyValue(span["attributes"], 'test.outcome', outcome)
assertAttrKeyValue(span["attributes"], 'tests.status', outcome)
assert len(span["parentSpanId"]) > 0
return True


def assertTest(pytester, name, ts_outcome, ts_status, outcome, status):
"""check a test results are correct"""
pytester.runpytest("--otel-endpoint=http://127.0.0.1:4317", "--otel-service-name=pytest_otel")
pytester.runpytest("--otel-endpoint=http://127.0.0.1:4317", "--otel-service-name=pytest_otel", "--otel-debug=True", "-rsx")
filename = "./tests.json"
waitForFileContent(filename)
foundTest = False
10 changes: 5 additions & 5 deletions resources/scripts/pytest_otel/tests/test_pytest_otel.py
@@ -18,23 +18,23 @@ def assertTestSuit(span, outcome, status):
assert span["kind"] == "SpanKind.SERVER"
assert span["status"]["status_code"] == status
if outcome is not None:
assert span["attributes"]["test.outcome"] == outcome
assert span["attributes"]["tests.status"] == outcome
assert span["parent_id"] is None
return True


def assertSpan(span, name, outcome, status):
assert span["kind"] == "SpanKind.INTERNAL"
assert span["status"]["status_code"] == status
assert span["attributes"]["test.name"] == name
assert span["attributes"]["tests.name"] == name
if outcome is not None:
assert span["attributes"]["test.outcome"] == outcome
assert span["attributes"]["tests.status"] == outcome
assert len(span["parent_id"]) > 0
return True


def assertTest(pytester, name, ts_outcome, ts_status, outcome, status):
pytester.runpytest("--otel-span-file-output=./test_spans.json")
pytester.runpytest("--otel-span-file-output=./test_spans.json", "--otel-debug=True", "-rsx")
span_list = None
with open("test_spans.json", encoding='utf-8') as input:
span_list = json.loads(input.read())
@@ -112,7 +112,7 @@ def test_xfail_plugin(pytester):
pytester.makepyfile(
common_code
+ """
@pytest.mark.xfail
@pytest.mark.xfail(reason="foo bug")
def test_xfail():
assert False
""")