35 changes: 25 additions & 10 deletions datadog_checks_base/datadog_checks/base/checks/db.py
@@ -4,24 +4,39 @@
 
 from abc import abstractmethod
 
+from datadog_checks.base.utils.db.utils import default_json_event_encoding
+from datadog_checks.base.utils.serialization import json
+
 from . import AgentCheck
 
 
 class DatabaseCheck(AgentCheck):
-    def database_monitoring_query_sample(self, raw_event: str):
-        self.event_platform_event(raw_event, "dbm-samples")
+    def database_monitoring_query_sample(self, raw_event: dict):
+        self.set_event_platform_properties(raw_event, "dbm-samples")
+        self.event_platform_event(json.dumps(raw_event, default=default_json_event_encoding), "dbm-samples")
 
-    def database_monitoring_query_metrics(self, raw_event: str):
-        self.event_platform_event(raw_event, "dbm-metrics")
+    def database_monitoring_query_metrics(self, raw_event: dict):
+        self.set_event_platform_properties(raw_event, "dbm-metrics")
+        self.event_platform_event(json.dumps(raw_event, default=default_json_event_encoding), "dbm-metrics")
 
-    def database_monitoring_query_activity(self, raw_event: str):
-        self.event_platform_event(raw_event, "dbm-activity")
+    def database_monitoring_query_activity(self, raw_event: dict):
+        self.set_event_platform_properties(raw_event, "dbm-activity")
+        self.event_platform_event(json.dumps(raw_event, default=default_json_event_encoding), "dbm-activity")
 
-    def database_monitoring_metadata(self, raw_event: str):
-        self.event_platform_event(raw_event, "dbm-metadata")
+    def database_monitoring_metadata(self, raw_event: dict):
+        self.set_event_platform_properties(raw_event, "dbm-metadata")
+        self.event_platform_event(json.dumps(raw_event, default=default_json_event_encoding), "dbm-metadata")
 
-    def database_monitoring_health(self, raw_event: str):
-        self.event_platform_event(raw_event, "dbm-health")
+    def database_monitoring_health(self, raw_event: dict):
+        self.set_event_platform_properties(raw_event, "dbm-health")
+        self.event_platform_event(json.dumps(raw_event, default=default_json_event_encoding), "dbm-health")
+
+    def set_event_platform_properties(self, raw_event: dict, track: str):
+        # Ensure all events have shared properties
+        raw_event["track"] = track
+        raw_event["database_instance"] = self.database_identifier
+        raw_event["dbms"] = self.dbms
+        raw_event["dbms_version"] = self.dbms_version
 
     @property
     @abstractmethod
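The db.py change above centralizes serialization: integrations now hand DatabaseCheck a plain dict, and the base class stamps the shared properties and JSON-encodes it once. A minimal sketch of the new call pattern follows; it is not code from this PR. The import path is inferred from the file location, and the abstract property names (database_identifier, dbms, dbms_version) are inferred from the attributes set_event_platform_properties reads; the example check, its property values, and the event fields are illustrative assumptions.

# Hypothetical sketch of the new call pattern (not part of this PR).
from datadog_checks.base.checks.db import DatabaseCheck


class ExampleDatabaseCheck(DatabaseCheck):
    # Assumed abstract properties, inferred from set_event_platform_properties.
    @property
    def database_identifier(self):
        return "example-host:5432"

    @property
    def dbms(self):
        return "postgres"

    @property
    def dbms_version(self):
        return "16.3"

    def check(self, _instance):
        # Callers submit plain dicts; the base class adds track,
        # database_instance, dbms, and dbms_version, then serializes with
        # default_json_event_encoding before shipping to the event platform.
        activity_event = {"timestamp": 1700000000000, "postgres_activity": []}
        self.database_monitoring_query_activity(activity_event)

One consequence of this design: every DBM track (samples, metrics, activity, metadata, health) carries the same identifying properties, and the per-call json.dumps(..., default=default_json_event_encoding) boilerplate disappears from every integration, as the remaining files in this diff show.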
(second changed file: name not captured in this view)
@@ -7,8 +7,6 @@
 from abc import ABC, abstractmethod
 from typing import TYPE_CHECKING, TypedDict
 
-from datadog_checks.base.utils.serialization import json
-
 from .utils import now_ms
 
 if TYPE_CHECKING:
@@ -143,7 +141,7 @@ def maybe_flush(self, is_last_payload):
             # For the last payload, we need to include the total number of payloads collected
             # This is used for snapshotting to ensure that all payloads have been received
             event["collection_payloads_count"] = self._collection_payloads_count
-            self._check.database_monitoring_metadata(json.dumps(event))
+            self._check.database_monitoring_metadata(event)
 
         self._queued_rows = []
 
8 changes: 1 addition & 7 deletions mysql/datadog_checks/mysql/activity.py
@@ -221,13 +221,7 @@ def _collect_activity(self):
             rows = self._get_activity(cursor)
             rows = self._normalize_rows(rows)
             event = self._create_activity_event(rows, tags)
-            payload = json.dumps(event, default=self._json_event_encoding)
-            self._check.database_monitoring_query_activity(payload)
-            self._check.histogram(
-                "dd.mysql.activity.collect_activity.payload_size",
-                len(payload),
-                tags=tags + self._check._get_debug_tags(),
-            )
+            self._check.database_monitoring_query_activity(event)
 
     def _should_collect_blocking_queries(self):
         # type: () -> bool
4 changes: 1 addition & 3 deletions mysql/datadog_checks/mysql/metadata.py
@@ -20,9 +20,7 @@
 from datadog_checks.base import is_affirmative
 from datadog_checks.base.utils.db.utils import (
     DBMAsyncJob,
-    default_json_event_encoding,
 )
-from datadog_checks.base.utils.serialization import json
 from datadog_checks.base.utils.tracking import tracked_method
 
 # default pg_settings collection interval in seconds
@@ -180,4 +178,4 @@ def report_mysql_metadata(self):
             "cloud_metadata": self._config.cloud_metadata,
             "metadata": settings,
         }
-        self._check.database_monitoring_metadata(json.dumps(event, default=default_json_event_encoding))
+        self._check.database_monitoring_metadata(event)
4 changes: 1 addition & 3 deletions mysql/datadog_checks/mysql/mysql.py
@@ -21,13 +21,11 @@
 from datadog_checks.base.utils.db.health import HealthEvent, HealthStatus
 from datadog_checks.base.utils.db.utils import (
     TagManager,
-    default_json_event_encoding,
     tracked_query,
 )
 from datadog_checks.base.utils.db.utils import (
     resolve_db_host as agent_host_resolver,
 )
-from datadog_checks.base.utils.serialization import json
 from datadog_checks.mysql import aws
 from datadog_checks.mysql.cursor import CommenterCursor, CommenterDictCursor, CommenterSSCursor
 from datadog_checks.mysql.health import MySqlHealth
@@ -1416,7 +1414,7 @@ def _send_database_instance_metadata(self):
             },
         }
         self._database_instance_emitted[self.database_identifier] = event
-        self.database_monitoring_metadata(json.dumps(event, default=default_json_event_encoding))
+        self.database_monitoring_metadata(event)
 
     def set_cluster_tags(self, db):
         if not self._config.replication_enabled:
3 changes: 1 addition & 2 deletions mysql/datadog_checks/mysql/statement_samples.py
@@ -25,7 +25,6 @@
 from datadog_checks.base.utils.db.utils import (
     DBMAsyncJob,
     RateLimitingTTLCache,
-    default_json_event_encoding,
     obfuscate_sql_with_metadata,
 )
 from datadog_checks.base.utils.serialization import json
@@ -595,7 +594,7 @@ def _collect_statement_samples(self):
             self._tags + ["events_statements_table:{}".format(events_statements_table)] + self._check._get_debug_tags()
         )
         for e in events:
-            self._check.database_monitoring_query_sample(json.dumps(e, default=default_json_event_encoding))
+            self._check.database_monitoring_query_sample(e)
             submitted_count += 1
         self._check.histogram(
             "dd.mysql.collect_statement_samples.time",
6 changes: 3 additions & 3 deletions mysql/datadog_checks/mysql/statements.py
@@ -15,7 +15,7 @@
 from datadog_checks.base.utils.common import to_native_string
 from datadog_checks.base.utils.db.sql import compute_sql_signature
 from datadog_checks.base.utils.db.statement_metrics import StatementMetrics
-from datadog_checks.base.utils.db.utils import DBMAsyncJob, default_json_event_encoding, obfuscate_sql_with_metadata
+from datadog_checks.base.utils.db.utils import DBMAsyncJob, obfuscate_sql_with_metadata
 from datadog_checks.base.utils.serialization import json
 from datadog_checks.base.utils.tracking import tracked_method
 from datadog_checks.mysql.cursor import CommenterDictCursor
@@ -151,7 +151,7 @@ def collect_per_statement_metrics(self):
             # No rows to process, can skip the rest of the payload generation and avoid an empty payload
             return
         for event in self._rows_to_fqt_events(rows, tags):
-            self._check.database_monitoring_query_sample(json.dumps(event, default=default_json_event_encoding))
+            self._check.database_monitoring_query_sample(event)
         payload = {
             'host': self._check.resolved_hostname,
             'timestamp': time.time() * 1000,
@@ -164,7 +164,7 @@
             'service': self._config.service,
             'mysql_rows': rows,
         }
-        self._check.database_monitoring_query_metrics(json.dumps(payload, default=default_json_event_encoding))
+        self._check.database_monitoring_query_metrics(payload)
         self._check.gauge(
             "dd.mysql.collect_per_statement_metrics.rows",
             len(rows),
7 changes: 3 additions & 4 deletions postgres/datadog_checks/postgres/metadata.py
@@ -3,7 +3,6 @@
 # Licensed under a 3-clause BSD style license (see LICENSE)
 from __future__ import annotations
 
-import json
 import re
 import time
 
@@ -22,7 +21,7 @@
 if TYPE_CHECKING:
     from datadog_checks.postgres import PostgreSql
 
-from datadog_checks.base.utils.db.utils import DBMAsyncJob, default_json_event_encoding
+from datadog_checks.base.utils.db.utils import DBMAsyncJob
 from datadog_checks.base.utils.tracking import tracked_method
 from datadog_checks.postgres.config_models import InstanceConfig
 
@@ -169,7 +168,7 @@ def report_postgres_extensions(self):
             "cloud_metadata": self._check.cloud_metadata,
             "metadata": self._extensions_cached,
         }
-        self._check.database_monitoring_metadata(json.dumps(event, default=default_json_event_encoding))
+        self._check.database_monitoring_metadata(event)
 
     @tracked_method(agent_check_getter=agent_check_getter)
     def _collect_postgres_extensions(self):
@@ -206,7 +205,7 @@ def report_postgres_metadata(self):
             "cloud_metadata": self._check.cloud_metadata,
             "metadata": self._pg_settings_cached,
         }
-        self._check.database_monitoring_metadata(json.dumps(event, default=default_json_event_encoding))
+        self._check.database_monitoring_metadata(event)
 
         if (
             self._collect_schemas_enabled
9 changes: 3 additions & 6 deletions postgres/datadog_checks/postgres/statement_samples.py
@@ -32,7 +32,6 @@
 from datadog_checks.base.utils.db.utils import (
     DBMAsyncJob,
     RateLimitingTTLCache,
-    default_json_event_encoding,
     obfuscate_sql_with_metadata,
 )
 from datadog_checks.base.utils.serialization import json
@@ -488,15 +487,13 @@ def _collect_statement_samples(self):
         submitted_count = 0
         if self._explain_plan_coll_enabled:
             for e in self._collect_plans(rows):
-                self._check.database_monitoring_query_sample(json.dumps(e, default=default_json_event_encoding))
+                self._check.database_monitoring_query_sample(e)
                 submitted_count += 1
 
         if collect_activity:
             active_connections = self._get_active_connections()
             activity_event = self._create_activity_event(rows, active_connections)
-            self._check.database_monitoring_query_activity(
-                json.dumps(activity_event, default=default_json_event_encoding)
-            )
+            self._check.database_monitoring_query_activity(activity_event)
             self._check.histogram(
                 "dd.postgres.collect_activity_snapshot.time",
                 (time.time() - start_time) * 1000,
@@ -631,7 +628,7 @@ def _row_to_raw_statement_event(self, row):
             },
         }
 
-        self._check.database_monitoring_query_sample(json.dumps(raw_query_event, default=default_json_event_encoding))
+        self._check.database_monitoring_query_sample(raw_query_event)
 
     def _can_explain_statement(self, obfuscated_statement):
         if obfuscated_statement.startswith('SELECT {}'.format(self._explain_function)):
2 changes: 1 addition & 1 deletion postgres/datadog_checks/postgres/statements.py
@@ -274,7 +274,7 @@ def collect_per_statement_metrics(self):
         if not rows:
             return
         for event in self._rows_to_fqt_events(rows):
-            self._check.database_monitoring_query_sample(json.dumps(event, default=default_json_event_encoding))
+            self._check.database_monitoring_query_sample(event)
 
         payload_wrapper = {
             'host': self._check.reported_hostname,
12 changes: 2 additions & 10 deletions sqlserver/datadog_checks/sqlserver/activity.py
@@ -13,7 +13,6 @@
 from datadog_checks.base.utils.db.utils import (
     DBMAsyncJob,
     RateLimitingTTLCache,
-    default_json_event_encoding,
     obfuscate_sql_with_metadata,
 )
 from datadog_checks.base.utils.serialization import json
@@ -490,13 +489,6 @@ def collect_activity(self):
         normalized_rows = self._normalize_queries_and_filter_rows(rows, MAX_PAYLOAD_BYTES)
         if self._collect_raw_query_statement:
             for raw_statement_event in self._rows_to_raw_statement_events(normalized_rows):
-                self._check.database_monitoring_query_sample(
-                    json.dumps(raw_statement_event, default=default_json_event_encoding)
-                )
+                self._check.database_monitoring_query_sample(raw_statement_event)
         event = self._create_activity_event(normalized_rows, connections)
-        payload = json.dumps(event, default=default_json_event_encoding)
-        self._check.database_monitoring_query_activity(payload)
-
-        self._check.histogram(
-            "dd.sqlserver.activity.collect_activity.payload_size", len(payload), **self._check.debug_stats_kwargs()
-        )
+        self._check.database_monitoring_query_activity(event)
7 changes: 2 additions & 5 deletions sqlserver/datadog_checks/sqlserver/deadlocks.py
@@ -6,8 +6,7 @@
 from time import time
 
 from datadog_checks.base.utils.db.sql import compute_sql_signature
-from datadog_checks.base.utils.db.utils import DBMAsyncJob, default_json_event_encoding, obfuscate_sql_with_metadata
-from datadog_checks.base.utils.serialization import json
+from datadog_checks.base.utils.db.utils import DBMAsyncJob, obfuscate_sql_with_metadata
 from datadog_checks.base.utils.tracking import tracked_method
 from datadog_checks.sqlserver.config import SQLServerConfig
 from datadog_checks.sqlserver.const import STATIC_INFO_ENGINE_EDITION, STATIC_INFO_VERSION
@@ -260,9 +259,7 @@ def collect_deadlocks(self):
         # Send payload only if deadlocks found
         if rows:
             deadlocks_event = self._create_deadlock_event(rows)
-            payload = json.dumps(deadlocks_event, default=default_json_event_encoding)
-            self._log.debug("Deadlocks payload: %s", str(payload))
-            self._check.database_monitoring_query_activity(payload)
+            self._check.database_monitoring_query_activity(deadlocks_event)
 
     def _create_deadlock_event(self, deadlock_rows):
         event = {
4 changes: 1 addition & 3 deletions sqlserver/datadog_checks/sqlserver/metadata.py
@@ -6,9 +6,7 @@
 from datadog_checks.base import is_affirmative
 from datadog_checks.base.utils.db.utils import (
     DBMAsyncJob,
-    default_json_event_encoding,
 )
-from datadog_checks.base.utils.serialization import json
 from datadog_checks.base.utils.tracking import tracked_method
 from datadog_checks.sqlserver.config import SQLServerConfig
 from datadog_checks.sqlserver.const import (
@@ -158,7 +156,7 @@ def report_sqlserver_metadata(self):
             "cloud_metadata": self._check.cloud_metadata,
             "metadata": settings_rows,
         }
-        self._check.database_monitoring_metadata(json.dumps(event, default=default_json_event_encoding))
+        self._check.database_monitoring_metadata(event)
         self.collect_schemas()
 
     def collect_schemas(self):
4 changes: 1 addition & 3 deletions sqlserver/datadog_checks/sqlserver/sqlserver.py
@@ -17,13 +17,11 @@
 from datadog_checks.base.utils.db.health import HealthEvent, HealthStatus
 from datadog_checks.base.utils.db.utils import (
     TagManager,
-    default_json_event_encoding,
     tracked_query,
 )
 from datadog_checks.base.utils.db.utils import (
     resolve_db_host as agent_host_resolver,
 )
-from datadog_checks.base.utils.serialization import json
 from datadog_checks.sqlserver.activity import SqlserverActivity
 from datadog_checks.sqlserver.agent_history import SqlserverAgentHistory
 from datadog_checks.sqlserver.config import SQLServerConfig
@@ -1141,4 +1139,4 @@ def _send_database_instance_metadata(self):
             },
         }
         self._database_instance_emitted[self.database_identifier] = event
-        self.database_monitoring_metadata(json.dumps(event, default=default_json_event_encoding))
+        self.database_monitoring_metadata(event)
8 changes: 3 additions & 5 deletions sqlserver/datadog_checks/sqlserver/statements.py
@@ -17,10 +17,8 @@
 from datadog_checks.base.utils.db.utils import (
     DBMAsyncJob,
     RateLimitingTTLCache,
-    default_json_event_encoding,
     obfuscate_sql_with_metadata,
 )
-from datadog_checks.base.utils.serialization import json
 from datadog_checks.base.utils.tracking import tracked_method
 from datadog_checks.sqlserver.config import SQLServerConfig
 from datadog_checks.sqlserver.utils import is_azure_sql_database
@@ -537,11 +535,11 @@ def collect_statement_metrics_and_plans(self):
         if not rows:
             return
         for event in self._rows_to_fqt_events(rows):
-            self._check.database_monitoring_query_sample(json.dumps(event, default=default_json_event_encoding))
+            self._check.database_monitoring_query_sample(event)
         payload = self._to_metrics_payload(rows, self._max_query_metrics)
-        self._check.database_monitoring_query_metrics(json.dumps(payload, default=default_json_event_encoding))
+        self._check.database_monitoring_query_metrics(payload)
         for event in self._collect_plans(rows, cursor, deadline):
-            self._check.database_monitoring_query_sample(json.dumps(event, default=default_json_event_encoding))
+            self._check.database_monitoring_query_sample(event)
             plans_submitted += 1
 
         self._check.count(
4 changes: 1 addition & 3 deletions sqlserver/datadog_checks/sqlserver/stored_procedures.py
@@ -8,9 +8,7 @@
 from datadog_checks.base.utils.db.statement_metrics import StatementMetrics
 from datadog_checks.base.utils.db.utils import (
     DBMAsyncJob,
-    default_json_event_encoding,
 )
-from datadog_checks.base.utils.serialization import json
 from datadog_checks.base.utils.tracking import tracked_method
 from datadog_checks.sqlserver.config import SQLServerConfig
 
@@ -176,7 +174,7 @@ def collect_procedure_metrics(self):
             self.log.debug("collect_procedure_metrics: no rows returned")
             return
         payload = self._to_metrics_payload(rows, self._max_procedure_metrics)
-        self._check.database_monitoring_query_metrics(json.dumps(payload, default=default_json_event_encoding))
+        self._check.database_monitoring_query_metrics(payload)
 
     def run_job(self):
         self.collect_procedure_metrics()
5 changes: 1 addition & 4 deletions sqlserver/datadog_checks/sqlserver/xe_collection/base.py
@@ -629,10 +629,7 @@ def run_job(self):
             self._log.error(f"Error serializing batched payload for logging: {e}")
 
         # Send the batched payload
-        serialized_payload = json.dumps(batched_payload, default=default_json_event_encoding)
-        # Log payload size
-        self._log.debug(f"Batched {self.session_name} payload size: {len(serialized_payload)} bytes")
-        self._check.database_monitoring_query_activity(serialized_payload)
+        self._check.database_monitoring_query_activity(batched_payload)
 
         self._log.info(f"Found {len(events)} events from {self.session_name} session")
 