[MAIN-2169] Prometheus performance fix (#1563)
* prometrix upgrade

* fixing lock file

* fixed SLA enricher

* updating prometrix

* lock markdown2 on a specific version to prevent test failures

* clarifying typing

* fixing version mismatch

* regenerate lock file

---------

Co-authored-by: arik <[email protected]>
Avi-Robusta and arikalon1 authored Sep 25, 2024
1 parent 5629604 commit d937c5b
Showing 8 changed files with 494 additions and 402 deletions.
5 changes: 2 additions & 3 deletions playbooks/robusta_playbooks/prometheus_enrichments.py
@@ -7,7 +7,6 @@
 from kubernetes.client.models.v1_service import V1Service
 from prometheus_api_client import PrometheusApiClientException
 from prometrix import PrometheusQueryResult
-
 from robusta.api import (
     ExecutionBaseEvent,
     MarkdownBlock,
@@ -227,9 +226,9 @@ def prometheus_sla_enricher(event: ExecutionBaseEvent, params: PrometheusSlaPara

     query_result = 0
     if prometheus_result.result_type == "scalar":
-        query_result = prometheus_result.scalar_result.value
+        query_result = prometheus_result.scalar_result["value"]
     elif prometheus_result.result_type == "vector":
-        query_result = float(prometheus_result.vector_result[-1].value.value)
+        query_result = float(prometheus_result.vector_result[-1]["value"]["value"])

     rule_result: bool = False
     if params.operator == ">":
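For context on the enricher change above, here is a minimal, self-contained sketch of the dict-based access pattern: scalar results expose their sample under "value", and vector samples nest it under "value" -> "value". The helper name and the hand-built sample dicts are illustrative only and are not part of the prometrix 0.2.0 API.

from typing import Any, Dict, List, Optional

# Hypothetical helper mirroring the access pattern used by prometheus_sla_enricher.
# The dict shapes are assumptions inferred from this diff, not a documented API.
def extract_query_value(
    result_type: str,
    scalar_result: Optional[Dict[str, Any]] = None,
    vector_result: Optional[List[Dict[str, Any]]] = None,
) -> float:
    if result_type == "scalar" and scalar_result:
        return float(scalar_result["value"])
    if result_type == "vector" and vector_result:
        # take the last sample of the vector, as the enricher does
        return float(vector_result[-1]["value"]["value"])
    return 0.0

print(extract_query_value("scalar", scalar_result={"timestamp": 1727222400, "value": "42"}))
print(extract_query_value(
    "vector",
    vector_result=[{"metric": {"pod": "demo"}, "value": {"timestamp": 1727222400, "value": "0.93"}}],
))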
2 changes: 1 addition & 1 deletion playbooks/robusta_playbooks/version_mismatch_enricher.py
@@ -27,7 +27,7 @@ class BuildInfoResults:
     raw_metrics: List[Dict[str, str]]

     def __init__(self, query_result: PrometheusQueryResult):
-        self.raw_metrics = [result.metric for result in query_result.vector_result]
+        self.raw_metrics = [result["metric"] for result in query_result.vector_result]
         self.api_versions = list(
             set([metric.get("git_version") for metric in self.raw_metrics if metric.get("node") is None])
         )
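The same object-to-dict shift applies in the version mismatch enricher: each vector entry's Prometheus labels now sit under the "metric" key instead of a .metric attribute. A small sketch of that label extraction under the same assumption, with fabricated sample data:

from typing import Any, Dict, List

def collect_api_server_versions(vector_result: List[Dict[str, Any]]) -> List[str]:
    # labels live under "metric"; API-server build info carries no "node" label,
    # mirroring the filter in BuildInfoResults above
    raw_metrics = [result["metric"] for result in vector_result]
    return sorted({m["git_version"] for m in raw_metrics if m.get("node") is None and "git_version" in m})

sample = [
    {"metric": {"git_version": "v1.29.4"}, "value": {"value": "1"}},
    {"metric": {"git_version": "v1.28.9", "node": "worker-1"}, "value": {"value": "1"}},
]
print(collect_api_server_versions(sample))  # ['v1.29.4']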
838 changes: 463 additions & 375 deletions poetry.lock

Large diffs are not rendered by default.

6 changes: 3 additions & 3 deletions pyproject.toml
@@ -47,7 +47,7 @@ poetry-core = "1.1.0a7"
 docutils="0.19"
 sentry-sdk = {extras = ["flask"], version = "^2.13.0"}
 opsgenie-sdk = "^2.1.5"
-markdown2 = "^2.4.2"
+markdown2 = "2.4.13"
 toml = "^0.10.2"
 watchgod = "^0.7"
 webexteamssdk = "^1.6.1"
@@ -60,7 +60,7 @@ humanize = "^3.13.1"
 cryptography = "^43.0.1"
 fpdf2 = "^2.7.1"
 attrs = "^23.1.0"
-prometrix = "0.1.16"
+prometrix = "0.2.0"
 hikaru-model-26 = "^1.1.1"
 apprise = "1.6.0"
 rocketchat-api = "^1.30.0"
@@ -73,7 +73,7 @@ requests = "^2.32.3"
 certifi = "^2023.7.22"
 regex = "2024.5.15"
 pyjwt = "^2.4.0"
-urllib3 = "1.26.19"
+urllib3 = "^1.26.20"

 [tool.poetry.dev-dependencies]
 pre-commit = "^2.13.0"
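Both markdown2 and prometrix are now exact pins, so the regenerated lock file can be sanity-checked against the installed environment. The check below is only a suggestion and is not part of the repository:

from importlib.metadata import version

# Exact pins taken from the pyproject.toml change above.
assert version("markdown2") == "2.4.13"
assert version("prometrix") == "0.2.0"
print("installed versions match the pins in pyproject.toml")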
34 changes: 18 additions & 16 deletions src/robusta/core/playbooks/prometheus_enrichment_utils.py
@@ -8,7 +8,7 @@
 import humanize
 import pygal
 from hikaru.model.rel_1_26 import Node
-from prometrix import PrometheusQueryResult, PrometheusSeries
+from prometrix import PrometheusQueryResult
 from pydantic import BaseModel

 from robusta.core.model.base_params import (
@@ -27,6 +27,8 @@
 ChartOptions = namedtuple("ChartOptions", ["query", "values_format"])
 BRACKETS_COMMA_PATTERN = r"\{\s*,"

+# for performance the series result is a dict of the format of the obj PrometheusSeries
+PrometheusSeriesDict = Dict[str, any]

 class XAxisLine(BaseModel):
     label: str
@@ -112,27 +114,27 @@ def get_resolution_from_duration(duration: timedelta) -> int:
     return _DEFAULT_RESOLUTION


-def get_target_name(series: PrometheusSeries) -> Optional[str]:
+def get_target_name(series: PrometheusSeriesDict) -> Optional[str]:
     for label in ["container", "pod", "node"]:
-        if label in series.metric:
-            return series.metric[label]
+        if label in series["metric"]:
+            return series["metric"][label]
     return None


-def get_series_job(series: PrometheusSeries) -> Optional[str]:
-    return series.metric.get("job")
+def get_series_job(series: PrometheusSeriesDict) -> Optional[str]:
+    return series["metric"]["job"] if "job" in series["metric"] else None


-def filter_prom_jobs_results(series_list_result: Optional[List[PrometheusSeries]]) -> Optional[List[PrometheusSeries]]:
+def filter_prom_jobs_results(series_list_result: Optional[List[PrometheusSeriesDict]]) -> Optional[List[PrometheusSeriesDict]]:
     if not series_list_result or len(series_list_result) == 1:
         return series_list_result

     target_names = {get_target_name(series) for series in series_list_result if get_target_name(series)}
-    return_list: List[PrometheusSeries] = []
+    return_list: List[PrometheusSeriesDict] = []

     # takes kubelet job if exists, return first job alphabetically if it doesn't
     for target_name in target_names:
-        relevant_series = [series for series in series_list_result if get_target_name(series) == target_name]
+        relevant_series: List[PrometheusSeriesDict] = [series for series in series_list_result if get_target_name(series) == target_name]
         relevant_kubelet_metric = [series for series in relevant_series if get_series_job(series) == "kubelet"]
         if len(relevant_kubelet_metric) == 1:
             return_list.append(relevant_kubelet_metric[0])
@@ -203,27 +205,27 @@ def create_chart_from_prometheus_query(
     max_y_value = 0
     series_list_result = prometheus_query_result.series_list_result
     if filter_prom_jobs:
-        series_list_result = filter_prom_jobs_results(series_list_result)
+        series_list_result: List[PrometheusSeriesDict] = filter_prom_jobs_results(series_list_result)

     for i, series in enumerate(series_list_result):
         label = get_target_name(series)

         if not label:
-            label = "\n".join([v for (key, v) in series.metric.items() if key != "job"])
+            label = "\n".join([v for (key, v) in series["metric"].items() if key != "job"])

         # If the label is empty, try to take it from the additional_label_factory
         if label == "" and chart_label_factory is not None:
             label = chart_label_factory(i)

         values = []
-        for index in range(len(series.values)):
-            timestamp = series.timestamps[index]
-            value = round(float(series.values[index]), FLOAT_PRECISION_LIMIT)
+        for index in range(len(series["values"])):
+            timestamp = series["timestamps"][index]
+            value = round(float(series["values"][index]), FLOAT_PRECISION_LIMIT)
             values.append((timestamp, value))
             if value > max_y_value:
                 max_y_value = value
-        min_time = min(min_time, min(series.timestamps))
-        max_time = max(max_time, max(series.timestamps))
+        min_time = min(min_time, min(series["timestamps"]))
+        max_time = max(max_time, max(series["timestamps"]))

     # Adjust min_time to ensure it is at least 1 hour before oom_kill_time, and adjust max_time to ensure it is at least 30 minutes after oom_kill_time, as required for the graph plot adjustments.
     if oom_kill_time:
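Taken together, the changes in this file replace PrometheusSeries attribute access with plain-dict lookups: labels under "metric", plus parallel "timestamps" and "values" lists. Below is a standalone sketch of that handling with a fabricated series; only the key names are taken from the diff, and the alias is spelled with typing.Any (the committed code uses the builtin any in Dict[str, any], which type checkers will not treat as "anything goes").

from typing import Any, Dict, List, Optional, Tuple

# Assumed shape of a series dict after the prometrix upgrade.
PrometheusSeriesDict = Dict[str, Any]

def get_target_name(series: PrometheusSeriesDict) -> Optional[str]:
    for label in ("container", "pod", "node"):
        if label in series["metric"]:
            return series["metric"][label]
    return None

def get_series_job(series: PrometheusSeriesDict) -> Optional[str]:
    return series["metric"].get("job")

def to_chart_points(series: PrometheusSeriesDict, precision: int = 2) -> List[Tuple[float, float]]:
    # zip the parallel lists instead of indexing them one position at a time
    return [(ts, round(float(v), precision)) for ts, v in zip(series["timestamps"], series["values"])]

series: PrometheusSeriesDict = {
    "metric": {"pod": "api-7d9f", "job": "kubelet"},
    "timestamps": [1727222400, 1727222460],
    "values": ["0.41", "0.4379"],
}
print(get_target_name(series), get_series_job(series))
print(to_chart_points(series))  # [(1727222400, 0.41), (1727222460, 0.44)]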
7 changes: 5 additions & 2 deletions src/robusta/core/reporting/blocks.py
@@ -4,8 +4,8 @@
 # 2. We add __init__ methods ourselves for convenience. Without our own __init__ method, something like
 # HeaderBlock("foo") doesn't work. Only HeaderBlock(text="foo") would be allowed by pydantic.
 import gzip
-import json
 import itertools
+import json
 import logging
 import textwrap
 from copy import deepcopy
@@ -520,12 +520,15 @@ class PrometheusBlock(BaseBlock):

     data: PrometheusQueryResult
     metadata: Dict[str, str]

     vertical_lines: Optional[List[PrometheusBlockLineData]]
     horizontal_lines: Optional[List[PrometheusBlockLineData]]
     y_axis_type: Optional[ChartValuesFormat]
     graph_name: Optional[str]

+    class Config:
+        arbitrary_types_allowed = True


     def __init__(
         self,
         data: PrometheusQueryResult,
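The new Config block is standard pydantic v1 usage: arbitrary_types_allowed lets a model declare a field whose type pydantic has no validator for, presumably needed here because of what PrometheusQueryResult carries after the prometrix upgrade. A minimal sketch with a made-up payload class, assuming the pydantic v1 API:

from pydantic import BaseModel

class RawQueryPayload:
    # Stand-in for a type pydantic cannot validate on its own (illustrative only).
    def __init__(self, series: list):
        self.series = series

class ExampleBlock(BaseModel):
    name: str
    data: RawQueryPayload  # without the Config below, pydantic raises at class definition time

    class Config:
        # accept the unknown type as-is (isinstance check only, no coercion)
        arbitrary_types_allowed = True

block = ExampleBlock(name="demo", data=RawQueryPayload(series=[{"metric": {"pod": "p"}}]))
print(block.name, len(block.data.series))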
2 changes: 1 addition & 1 deletion src/robusta/core/sinks/robusta/dal/model_conversion.py
@@ -154,7 +154,7 @@ def to_evidence_json(
                 structured_data.append({"type": "list", "data": block.items})
             elif isinstance(block, PrometheusBlock):
                 structured_data.append(
-                    {"type": "prometheus", "data": block.data.dict(), "metadata": block.metadata, "version": 1.0}
+                    {"type": "prometheus", "data": dict(block.data), "metadata": block.metadata, "version": 1.0}
                 )
             elif isinstance(block, TableBlock):
                 if block.table_name:
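Replacing block.data.dict() with dict(block.data) swaps pydantic's recursive conversion for a shallow one: top-level fields are exposed as a dict while nested values (here, the already-plain series data) are handed over untouched rather than deep-converted, which is cheaper for large query results. Whether PrometheusQueryResult is still a pydantic model is not visible in this diff, so the stand-in below only illustrates the shallow-versus-deep difference, assuming the pydantic v1 API:

from typing import Any, Dict, List
from pydantic import BaseModel

class Inner(BaseModel):
    value: str

class QueryResultLike(BaseModel):
    result_type: str
    series_list_result: List[Dict[str, Any]]
    nested: Inner

qr = QueryResultLike(
    result_type="matrix",
    series_list_result=[{"metric": {"pod": "p"}, "values": ["1", "2"]}],
    nested=Inner(value="x"),
)

deep = qr.dict()    # recursively converts nested models and rebuilds containers
shallow = dict(qr)  # shallow: field values are yielded as-is, no copies made
print(type(deep["nested"]), type(shallow["nested"]))           # dict vs Inner
print(shallow["series_list_result"] is qr.series_list_result)  # True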
@@ -60,7 +60,7 @@ def _get_query_prometheus_value(self, query: str) -> Optional[float]:
             if query_result.result_type == "error" or not query_result.vector_result:
                 logging.error("PrometheusDiscoveryUtils failed to get prometheus results.")
                 return
-            value = query_result.vector_result[0].value.value
+            value = query_result.vector_result[0]["value"]["value"]
             return_value = float("%.2f" % float(value))
             return return_value if return_value >= 0 else None
         except:
