Skip to content

Commit 961a037

Browse files
chore: update charm libraries
1 parent 2a764fa commit 961a037

1 file changed

Lines changed: 35 additions & 10 deletions

File tree

lib/charms/grafana_agent/v0/cos_agent.py

Lines changed: 35 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -211,7 +211,9 @@ def __init__(self, *args):
211211
```
212212
"""
213213

214+
import copy
214215
import enum
216+
import hashlib
215217
import json
216218
import logging
217219
import socket
@@ -254,7 +256,7 @@ class _MetricsEndpointDict(TypedDict):
254256

255257
LIBID = "dc15fa84cef84ce58155fb84f6c6213a"
256258
LIBAPI = 0
257-
LIBPATCH = 24
259+
LIBPATCH = 25
258260

259261
PYDEPS = ["cosl >= 0.0.50", "pydantic"]
260262

@@ -308,6 +310,13 @@ def _dedupe_list(items: List[Dict[str, Any]]) -> List[Dict[str, Any]]:
308310
return unique_items
309311

310312

313+
def _dict_hash_except_key(scrape_config: Dict[str, Any], key: Optional[str]):
314+
"""Get a hash of the scrape_config dict, except for the specified key."""
315+
cfg_for_hash = {k: v for k, v in scrape_config.items() if k != key}
316+
serialized = json.dumps(cfg_for_hash, sort_keys=True)
317+
return hashlib.blake2b(serialized.encode(), digest_size=4).hexdigest()
318+
319+
311320
class TracingError(Exception):
312321
"""Base class for custom errors raised by tracing."""
313322

@@ -697,6 +706,27 @@ def _on_refresh(self, event):
697706
) as e:
698707
logger.error("Invalid relation data provided: %s", e)
699708

709+
def _deterministic_scrape_configs(
    self, scrape_configs: List[Dict[str, Any]]
) -> List[Dict[str, Any]]:
    """Return a stably-ordered copy of *scrape_configs* with unique job names.

    Each config's job name is rewritten to ``<app>_<job_or_default>_<8hex>``,
    where the hash suffix covers every field of the config except any
    pre-existing ``job_name``, so it is sensitive to all stable fields while
    remaining deterministic across serializations. The input list is deep
    copied (the caller's configs are never mutated) and the result is sorted
    by job name for a stable ordering.
    """
    app_prefix = self._charm.app.name
    renamed = []
    for cfg in copy.deepcopy(scrape_configs):
        base_name = cfg.get("job_name", "default")
        digest = _dict_hash_except_key(cfg, "job_name")
        cfg["job_name"] = f"{app_prefix}_{base_name}_{digest}"
        renamed.append(cfg)
    renamed.sort(key=lambda cfg: cfg.get("job_name", ""))
    return renamed
729+
700730
@property
701731
def _scrape_jobs(self) -> List[Dict]:
702732
"""Return a list of scrape_configs.
@@ -711,22 +741,17 @@ def _scrape_jobs(self) -> List[Dict]:
711741
scrape_configs = self._scrape_configs.copy()
712742

713743
# Convert "metrics_endpoints" to standard scrape_configs, and add them in
714-
unit_name = self._charm.unit.name.replace("/", "_")
715744
for endpoint in self._metrics_endpoints:
716-
port = endpoint["port"]
717-
path = endpoint["path"]
718-
sanitized_path = path.strip("/").replace("/", "_")
719745
scrape_configs.append(
720746
{
721-
"job_name": f"{unit_name}_localhost_{port}_{sanitized_path}",
722-
"metrics_path": path,
723-
"static_configs": [{"targets": [f"localhost:{port}"]}],
747+
"metrics_path": endpoint["path"],
748+
"static_configs": [{"targets": [f"localhost:{endpoint['port']}"]}],
724749
}
725750
)
726751

727752
scrape_configs = scrape_configs or []
728753

729-
return scrape_configs
754+
return self._deterministic_scrape_configs(scrape_configs)
730755

731756
@property
732757
def _metrics_alert_rules(self) -> Dict:
@@ -742,7 +767,7 @@ def _metrics_alert_rules(self) -> Dict:
742767
)
743768
alert_rules.add_path(self._metrics_rules, recursive=self._recursive)
744769
alert_rules.add(
745-
generic_alert_groups.application_rules,
770+
copy.deepcopy(generic_alert_groups.application_rules),
746771
group_name_prefix=JujuTopology.from_charm(self._charm).identifier,
747772
)
748773

0 commit comments

Comments
 (0)