Remove some more basic Python 2 compatibility code (#18619)
* Remove some more basic Python 2 compatibility code

* fix spark
iliakur committed Sep 19, 2024
1 parent 25c7fbe commit 8c600ef
Showing 13 changed files with 34 additions and 64 deletions.
3 changes: 1 addition & 2 deletions apache/datadog_checks/apache/apache.py
@@ -2,8 +2,7 @@
 # All rights reserved
 # Licensed under Simplified BSD License (see LICENSE)
 import re
-
-from six.moves.urllib.parse import urlparse
+from urllib.parse import urlparse
 
 from datadog_checks.base import AgentCheck, ConfigurationError
 from datadog_checks.base.errors import CheckException
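On Python 3, `six.moves.urllib.parse` is a pass-through alias for the standard library's `urllib.parse`, so hunks like this one are a mechanical one-for-one swap. A minimal sketch of the call the check depends on (URL is a placeholder):

```python
from urllib.parse import urlparse

# Same parsing behavior the six alias delegated to on Python 3.
parsed = urlparse('http://localhost/server-status?auto')
print(parsed.scheme, parsed.hostname, parsed.path)  # http localhost /server-status
```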
2 changes: 1 addition & 1 deletion gitlab/datadog_checks/gitlab/common.py
@@ -1,7 +1,7 @@
 # (C) Datadog, Inc. 2023-present
 # All rights reserved
 # Licensed under a 3-clause BSD style license (see LICENSE)
-from six.moves.urllib.parse import urlparse
+from urllib.parse import urlparse
 
 
 def get_gitlab_version(http, log, gitlab_url, api_token):
3 changes: 2 additions & 1 deletion scylla/datadog_checks/scylla/scylla.py
@@ -1,8 +1,9 @@
 # (C) Datadog, Inc. 2020-present
 # All rights reserved
 # Licensed under a 3-clause BSD style license (see LICENSE)
+from urllib.parse import urlparse
+
 from six import PY2
-from six.moves.urllib.parse import urlparse
 
 from datadog_checks.base import ConfigurationError, OpenMetricsBaseCheck
 
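Note that `from six import PY2` survives this hunk as a context line; only the `urlparse` import moves to the standard library here. In these integrations the `PY2` flag typically guards a configuration error for Python-3-only code paths; a hypothetical sketch of that pattern (the instance key is an assumption, not taken from this diff):

```python
from six import PY2

from datadog_checks.base import ConfigurationError


def validate_python_version(instance):
    # Hypothetical guard: the OpenMetrics V2 path requires Python 3.
    if PY2 and instance.get('openmetrics_endpoint'):
        raise ConfigurationError('This configuration is only supported on Python 3.')
```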
27 changes: 13 additions & 14 deletions spark/datadog_checks/spark/spark.py
@@ -1,12 +1,11 @@
 # (C) Datadog, Inc. 2018-present
 # All rights reserved
 # Licensed under a 3-clause BSD style license (see LICENSE)
+from urllib.parse import urljoin, urlparse, urlsplit, urlunsplit
 
 from bs4 import BeautifulSoup
 from requests.exceptions import ConnectionError, HTTPError, InvalidURL, Timeout
 from simplejson import JSONDecodeError
-from six import iteritems, itervalues
-from six.moves.urllib.parse import urljoin, urlparse, urlsplit, urlunsplit
 
 from datadog_checks.base import AgentCheck, ConfigurationError, is_affirmative
 
@@ -114,7 +113,7 @@ def check(self, _):
 
         # Report success after gathering all metrics from the ApplicationMaster
         if spark_apps:
-            _, (_, tracking_url) = next(iteritems(spark_apps))
+            _, (_, tracking_url) = next(iter(spark_apps.items()))
             base_url = self._get_request_url(tracking_url)
             am_address = self._get_url_base(base_url)
 
@@ -372,7 +371,7 @@ def _get_spark_app_ids(self, running_apps, tags):
         """
         spark_apps = {}
         version_set = False
-        for app_id, (app_name, tracking_url) in iteritems(running_apps):
+        for app_id, (app_name, tracking_url) in running_apps.items():
             try:
                 if not version_set:
                     version_set = self._collect_version(tracking_url, tags)
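The recurring change in this file is `six.iteritems`/`six.itervalues` becoming the dict view methods. The one subtlety is in the `check()` hunk above: a Python 3 dict view is not itself an iterator, so `next()` needs an explicit `iter()`. A minimal sketch with invented data:

```python
running_apps = {
    'application_1': ('SparkPi', 'http://yarn:8088/proxy/application_1'),
    'application_2': ('WordCount', 'http://yarn:8088/proxy/application_2'),
}

# dict views have no __next__, so next(running_apps.items()) raises TypeError;
# wrapping the view in iter() restores the old next(iteritems(...)) behavior.
app_id, (app_name, tracking_url) = next(iter(running_apps.items()))

# Plain loops need no wrapper at all:
for app_id, (app_name, tracking_url) in running_apps.items():
    print(app_id, app_name, tracking_url)
```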
@@ -394,7 +393,7 @@ def _spark_job_metrics(self, running_apps, addl_tags):
         """
         Get metrics for each Spark job.
         """
-        for app_id, (app_name, tracking_url) in iteritems(running_apps):
+        for app_id, (app_name, tracking_url) in running_apps.items():
 
             base_url = self._get_request_url(tracking_url)
             response = self._rest_request_to_json(
@@ -423,7 +422,7 @@ def _spark_stage_metrics(self, running_apps, addl_tags):
         """
         Get metrics for each Spark stage.
         """
-        for app_id, (app_name, tracking_url) in iteritems(running_apps):
+        for app_id, (app_name, tracking_url) in running_apps.items():
 
             base_url = self._get_request_url(tracking_url)
             response = self._rest_request_to_json(
Expand All @@ -449,7 +448,7 @@ def _spark_executor_metrics(self, running_apps, addl_tags):
"""
Get metrics for each Spark executor.
"""
for app_id, (app_name, tracking_url) in iteritems(running_apps):
for app_id, (app_name, tracking_url) in running_apps.items():

base_url = self._get_request_url(tracking_url)
response = self._rest_request_to_json(
@@ -479,7 +478,7 @@ def _spark_rdd_metrics(self, running_apps, addl_tags):
         """
         Get metrics for each Spark RDD.
         """
-        for app_id, (app_name, tracking_url) in iteritems(running_apps):
+        for app_id, (app_name, tracking_url) in running_apps.items():
 
             base_url = self._get_request_url(tracking_url)
             response = self._rest_request_to_json(
@@ -499,7 +498,7 @@ def _spark_streaming_statistics_metrics(self, running_apps, addl_tags):
         """
         Get metrics for each application streaming statistics.
         """
-        for app_id, (app_name, tracking_url) in iteritems(running_apps):
+        for app_id, (app_name, tracking_url) in running_apps.items():
             try:
                 base_url = self._get_request_url(tracking_url)
                 response = self._rest_request_to_json(
@@ -525,7 +524,7 @@ def _spark_structured_streams_metrics(self, running_apps, addl_tags):
         - `SET spark.sql.streaming.metricsEnabled=true` in the app
         """
 
-        for app_name, tracking_url in itervalues(running_apps):
+        for app_name, tracking_url in running_apps.values():
             try:
                 base_url = self._get_request_url(tracking_url)
                 response = self._rest_request_to_json(
@@ -534,10 +533,10 @@ def _spark_structured_streams_metrics(self, running_apps, addl_tags):
                 self.log.debug('Structured streaming metrics: %s', response)
                 response = {
                     metric_name: v['value']
-                    for metric_name, v in iteritems(response.get('gauges'))
+                    for metric_name, v in response.get('gauges').items()
                     if 'streaming' in metric_name and 'value' in v
                 }
-                for gauge_name, value in iteritems(response):
+                for gauge_name, value in response.items():
                     match = STRUCTURED_STREAMS_METRICS_REGEX.match(gauge_name)
                     if not match:
                         self.log.debug("No regex match found for gauge: '%s'", str(gauge_name))
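Standalone, the gauge-filtering step from this hunk looks like the following; the payload is invented for illustration:

```python
response = {
    'gauges': {
        'spark.streaming.app.inputRate-total': {'value': 12.5},
        'jvm.heap.used': {'value': 1024},
    }
}

streaming_gauges = {
    metric_name: v['value']
    for metric_name, v in response.get('gauges').items()
    if 'streaming' in metric_name and 'value' in v
}
print(streaming_gauges)  # {'spark.streaming.app.inputRate-total': 12.5}
```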
@@ -576,7 +575,7 @@ def _set_metrics_from_json(self, tags, metrics_json, metrics):
         if metrics_json is None:
             return
 
-        for status, (metric_name, metric_type) in iteritems(metrics):
+        for status, (metric_name, metric_type) in metrics.items():
             # Metrics defined with a dot `.` are exposed in a nested dictionary.
             # {"foo": {"bar": "baz", "qux": "quux"}}
             # foo.bar -> baz
@@ -625,7 +624,7 @@ def _rest_request(self, url, object_path, service_name, tags, *args, **kwargs):
 
         # Add kwargs as arguments
         if kwargs:
-            query = '&'.join(['{0}={1}'.format(key, value) for key, value in iteritems(kwargs)])
+            query = '&'.join(['{0}={1}'.format(key, value) for key, value in kwargs.items()])
             url = urljoin(url, '?' + query)
 
         try:
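The last spark.py hunk keeps the hand-rolled query string, with `kwargs.items()` as a drop-in for `iteritems(kwargs)`. Standalone, under assumed values:

```python
from urllib.parse import urljoin

url = 'http://spark-master:18080/api/v1/applications'
kwargs = {'status': 'running', 'limit': 5}

query = '&'.join(['{0}={1}'.format(key, value) for key, value in kwargs.items()])
print(urljoin(url, '?' + query))
# http://spark-master:18080/api/v1/applications?status=running&limit=5
```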
4 changes: 2 additions & 2 deletions spark/tests/test_spark.py
@@ -6,14 +6,14 @@
 import ssl
 import threading
 import time
+from http import server as BaseHTTPServer
+from urllib.parse import parse_qsl, unquote_plus, urlencode, urljoin, urlparse, urlunparse
 
 import mock
 import pytest
 import urllib3
 from requests import RequestException
 from six import iteritems
-from six.moves import BaseHTTPServer
-from six.moves.urllib.parse import parse_qsl, unquote_plus, urlencode, urljoin, urlparse, urlunparse
 
 from datadog_checks.dev.http import MockResponse
 from datadog_checks.dev.utils import get_metadata_metrics
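`six.moves.BaseHTTPServer` resolves to `http.server` on Python 3, so aliasing the module at import time keeps every existing `BaseHTTPServer.` reference in the test file working unchanged. A minimal sketch of the alias in use:

```python
from http import server as BaseHTTPServer


class PingHandler(BaseHTTPServer.BaseHTTPRequestHandler):
    def do_GET(self):
        self.send_response(200)
        self.end_headers()
        self.wfile.write(b'pong')


# Serving is left commented out since it blocks waiting for a request:
# httpd = BaseHTTPServer.HTTPServer(('127.0.0.1', 0), PingHandler)
# httpd.handle_request()
```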
4 changes: 2 additions & 2 deletions supervisord/datadog_checks/supervisord/supervisord.py
@@ -5,10 +5,10 @@
 import re
 import socket
 import time
+import xmlrpc.client as xmlrpclib
 from collections import defaultdict
 
 import supervisor.xmlrpc
-from six.moves import xmlrpc_client as xmlrpclib
 
 from datadog_checks.base import AgentCheck
 
@@ -206,7 +206,7 @@ def _connect(instance):
         host = instance.get('host', DEFAULT_HOST)
         port = instance.get('port', DEFAULT_PORT)
         auth = '{}:{}@'.format(user, password) if user and password else ''
-        server = xmlrpclib.Server('http://{}{}:{}/RPC2'.format(auth, host, port))
+        server = xmlrpclib.ServerProxy('http://{}{}:{}/RPC2'.format(auth, host, port))
         return server.supervisor
 
     @staticmethod
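`xmlrpc.client` still ships `Server` as a backwards-compatibility alias, but `ServerProxy` is the documented name, hence the rename that accompanies the import change. A sketch against a local supervisord (address and credentials are placeholders):

```python
import xmlrpc.client as xmlrpclib

server = xmlrpclib.ServerProxy('http://user:pass@localhost:9001/RPC2')
# Attribute access builds method calls lazily; nothing touches the network
# until a call is made, e.g.:
# print(server.supervisor.getState())  # {'statecode': 1, 'statename': 'RUNNING'}
```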
4 changes: 2 additions & 2 deletions supervisord/tests/conftest.py
@@ -4,9 +4,9 @@
 
 import os
 from copy import deepcopy
+from xmlrpc.client import ServerProxy
 
 import pytest
-from six.moves import xmlrpc_client as xmlrpclib
 
 from datadog_checks.dev import docker_run
 from datadog_checks.supervisord.supervisord import SupervisordCheck
@@ -32,6 +32,6 @@ def bad_instance():
 @pytest.fixture(scope='session')
 def dd_environment():
     with docker_run(compose_file=os.path.join(HERE, 'compose', 'supervisord.yaml'), endpoints=URL, mount_logs=True):
-        server = xmlrpclib.Server('{}/RPC2'.format(URL))
+        server = ServerProxy('{}/RPC2'.format(URL))
         server.supervisor.startAllProcesses()
         yield SUPERVISORD_CONFIG
6 changes: 2 additions & 4 deletions supervisord/tests/test_supervisord_unit.py
@@ -2,12 +2,12 @@
 # All rights reserved
 # Licensed under a 3-clause BSD style license (see LICENSE)
 
+import xmlrpc.client as xmlrpclib
 from socket import socket
 
 import mock
 import pytest
 from mock import patch
-from six.moves import xmlrpc_client as xmlrpclib
 
 from datadog_checks.supervisord.supervisord import FORMAT_TIME  # pylint: disable=import-error,no-name-in-module
 
@@ -24,9 +24,7 @@ def mock_server(url, transport=None):
 def test_check(aggregator, check):
     """Integration test for supervisord check. Using a mocked supervisord."""
 
-    with patch.object(xmlrpclib, 'Server', side_effect=mock_server), patch.object(
-        xmlrpclib, 'ServerProxy', side_effect=mock_server
-    ):
+    with patch.object(xmlrpclib, 'ServerProxy', side_effect=mock_server):
         for tc in TEST_CASES:
             for instance in tc['instances']:
                 name = instance['name']
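With `xmlrpclib.Server` gone from the production code, the test only needs to patch the one name it actually uses. A reduced sketch of the pattern, with `unittest.mock` standing in for the `mock` package the test imports:

```python
import xmlrpc.client as xmlrpclib
from unittest.mock import MagicMock, patch


def mock_server(url, transport=None):
    return MagicMock(name='mock_server:{}'.format(url))


with patch.object(xmlrpclib, 'ServerProxy', side_effect=mock_server):
    proxy = xmlrpclib.ServerProxy('http://localhost:9001/RPC2')
    # proxy is whatever mock_server returned, not a live XML-RPC client.
```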
8 changes: 1 addition & 7 deletions teamcity/datadog_checks/teamcity/common.py
@@ -5,13 +5,7 @@
 import time
 from collections import OrderedDict
 from copy import deepcopy
-
-from six import PY2
-
-if PY2:
-    from urlparse import urlparse
-else:
-    from urllib.parse import urlparse
+from urllib.parse import urlparse
 
 import requests
 
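With the Python 2 branch gone, the six-line conditional import collapses to the single modern form; the parsing it feeds is unchanged (URL is a placeholder):

```python
from urllib.parse import urlparse

parsed = urlparse('https://teamcity.example.com/app/rest/builds?locator=count:1')
print(parsed.netloc, parsed.path)  # teamcity.example.com /app/rest/builds
```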
15 changes: 5 additions & 10 deletions tls/datadog_checks/tls/tls_remote.py
@@ -14,7 +14,6 @@
 from datadog_checks.base.utils.time import get_timestamp
 
 from .const import SERVICE_CHECK_CAN_CONNECT, SERVICE_CHECK_EXPIRATION, SERVICE_CHECK_VALIDATION
-from .utils import closing
 
 
 class TLSRemoteCheck(object):
@@ -58,13 +57,11 @@ def _get_cert_and_protocol_version(self, sock):
             self.log.debug("Could not validate certificate because there is no connection")
             return cert, protocol_version
         # Get the cert & TLS version from the connection
-        with closing(sock):
+        with sock:
             self.log.debug('Getting cert and TLS protocol version')
             try:
-                with closing(
-                    self.agent_check.get_tls_context().wrap_socket(
-                        sock, server_hostname=self.agent_check._server_hostname
-                    )
+                with self.agent_check.get_tls_context().wrap_socket(
+                    sock, server_hostname=self.agent_check._server_hostname
                 ) as secure_sock:
                     protocol_version = secure_sock.version()
                     der_cert = secure_sock.getpeercert(binary_form=True)
@@ -180,14 +177,12 @@ def fetch_intermediate_certs(self):
             self.log.error('Error occurred while connecting to socket to discover intermediate certificates: %s', e)
             return
 
-        with closing(sock):
+        with sock:
             try:
                 context = ssl.SSLContext(protocol=ssl.PROTOCOL_TLS)
                 context.verify_mode = ssl.CERT_NONE
 
-                with closing(
-                    context.wrap_socket(sock, server_hostname=self.agent_check._server_hostname)
-                ) as secure_sock:
+                with context.wrap_socket(sock, server_hostname=self.agent_check._server_hostname) as secure_sock:
                     der_cert = secure_sock.getpeercert(binary_form=True)
                     protocol_version = secure_sock.version()
                     if protocol_version and protocol_version not in self.agent_check.allowed_versions:
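The `closing()` shim existed because Python 2 socket objects were not context managers; on Python 3 both `socket.socket` and `ssl.SSLSocket` close themselves on `with` exit, so the bare `with sock:` form is equivalent. A self-contained sketch of the handshake flow; note that the real check builds its context via `get_tls_context()`, not `create_default_context()`, and the hostname is a placeholder:

```python
import socket
import ssl

host = 'example.com'
context = ssl.create_default_context()

with socket.create_connection((host, 443), timeout=10) as sock:
    with context.wrap_socket(sock, server_hostname=host) as secure_sock:
        print(secure_sock.version())  # e.g. 'TLSv1.3'
        der_cert = secure_sock.getpeercert(binary_form=True)
        print(len(der_cert), 'bytes of DER-encoded certificate')
```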
16 changes: 1 addition & 15 deletions tls/datadog_checks/tls/utils.py
@@ -5,8 +5,6 @@
 
 from ipaddress import ip_address
 
-from six import PY2, text_type
-
 # https://github.com/python/cpython/blob/ef516d11c1a0f885dba0aba8cf5366502077cdd4/Lib/ssl.py#L158-L165
 DEFAULT_PROTOCOL_VERSIONS = {'TLSv1.2', 'TLSv1.3'}
 SUPPORTED_PROTOCOL_VERSIONS = {'SSLv3', 'TLSv1', 'TLSv1.1', 'TLSv1.2', 'TLSv1.3'}
@@ -46,7 +44,7 @@ def get_protocol_versions(versions):
 
 def is_ip_address(hostname):
     try:
-        ip_address(text_type(hostname))
+        ip_address(str(hostname))
     except ValueError:
         return False
 
@@ -59,15 +57,3 @@ def days_to_seconds(days):
 
 def seconds_to_days(seconds):
     return seconds / 60 / 60 / 24
-
-
-if PY2:
-    from contextlib import closing as _closing
-
-    def closing(sock):
-        return _closing(sock)
-
-else:
-
-    def closing(sock):
-        return sock
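`six.text_type` is simply `str` on Python 3, and `ipaddress.ip_address` rejects anything that is not a valid IPv4/IPv6 literal with `ValueError`, which is the behavior the helper relies on. The full function (its trailing `return True` sits outside the hunk shown) works like this:

```python
from ipaddress import ip_address


def is_ip_address(hostname):
    try:
        ip_address(str(hostname))  # str() replaces six.text_type
    except ValueError:
        return False
    return True


print(is_ip_address('192.168.0.1'), is_ip_address('datadoghq.com'))  # True False
```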
5 changes: 2 additions & 3 deletions tls/tests/utils.py
@@ -9,7 +9,6 @@
 from urllib.parse import urlparse
 
 from datadog_checks.dev import TempDir
-from datadog_checks.tls.utils import closing
 
 
 @contextmanager
@@ -31,8 +30,8 @@ def download_cert(filepath, host, raw=False):
 
     for _ in range(20):
         try:
-            with closing(socket.create_connection((host, 443))) as sock:
-                with closing(context.wrap_socket(sock, server_hostname=host)) as secure_sock:
+            with socket.create_connection((host, 443)) as sock:
+                with context.wrap_socket(sock, server_hostname=host) as secure_sock:
                     cert = secure_sock.getpeercert(binary_form=True)
         except Exception:  # no cov
             time.sleep(3)
1 change: 0 additions & 1 deletion varnish/datadog_checks/varnish/varnish.py
@@ -8,7 +8,6 @@
 from os import geteuid
 
 from packaging.version import Version
-from six.moves import filter
 
 from datadog_checks.base import ConfigurationError
 from datadog_checks.base.checks import AgentCheck
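`six.moves.filter` existed because Python 2's builtin `filter` returned a list; on Python 3 the builtin is already the lazy iterator six provided, so the import can be deleted with no call-site changes:

```python
# filter(None, iterable) lazily drops falsy items -- the same semantics
# six.moves.filter guaranteed across both major versions.
lines = ['MAIN.uptime 12345', '', 'MAIN.sess_conn 42', '']
for line in filter(None, lines):
    print(line)
```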
