diff --git a/splunklib/__init__.py b/splunklib/__init__.py index 87d26b74..8f808d64 100644 --- a/splunklib/__init__.py +++ b/splunklib/__init__.py @@ -14,8 +14,6 @@ """Python library for Splunk.""" -from __future__ import absolute_import -from splunklib.six.moves import map import logging DEFAULT_LOG_FORMAT = '%(asctime)s, Level=%(levelname)s, Pid=%(process)s, Logger=%(name)s, File=%(filename)s, ' \ @@ -31,5 +29,35 @@ def setup_logging(level, log_format=DEFAULT_LOG_FORMAT, date_format=DEFAULT_DATE format=log_format, datefmt=date_format) + +def ensure_binary(s, encoding='utf-8', errors='strict'): + """ + - `str` -> encoded to `bytes` + - `bytes` -> `bytes` + """ + if isinstance(s, str): + return s.encode(encoding, errors) + + if isinstance(s, bytes): + return s + + raise TypeError(f"not expecting type '{type(s)}'") + + +def ensure_str(s, encoding='utf-8', errors='strict'): + """ + - `str` -> `str` + - `bytes` -> decoded to `str` + """ + if isinstance(s, bytes): + return s.decode(encoding, errors) + + if isinstance(s, str): + return s + + raise TypeError(f"not expecting type '{type(s)}'") + + __version_info__ = (1, 6, 19) + __version__ = ".".join(map(str, __version_info__)) diff --git a/splunklib/binding.py b/splunklib/binding.py index 4a4098df..a387ecef 100644 --- a/splunklib/binding.py +++ b/splunklib/binding.py @@ -24,30 +24,22 @@ :mod:`splunklib.client` module. """ -from __future__ import absolute_import - import io import logging import socket import ssl -import sys import time from base64 import b64encode from contextlib import contextmanager from datetime import datetime from functools import wraps from io import BytesIO -from xml.etree.ElementTree import XML - -from splunklib import six -from splunklib.six.moves import urllib +from urllib import parse +from http import client +from http.cookies import SimpleCookie +from xml.etree.ElementTree import XML, ParseError -from .data import record - -try: - from xml.etree.ElementTree import ParseError -except ImportError as e: - from xml.parsers.expat import ExpatError as ParseError +from splunklib.data import record logger = logging.getLogger(__name__) @@ -56,7 +48,12 @@ "connect", "Context", "handler", - "HTTPError" + "HTTPError", + "UrlEncoded", + "_encode", + "_make_cookie_header", + "_NoAuthenticationToken", + "namespace" ] # If you change these, update the docstring @@ -65,14 +62,16 @@ DEFAULT_PORT = "8089" DEFAULT_SCHEME = "https" + def _log_duration(f): @wraps(f) def new_f(*args, **kwargs): start_time = datetime.now() val = f(*args, **kwargs) end_time = datetime.now() - logger.debug("Operation took %s", end_time-start_time) + logger.debug("Operation took %s", end_time - start_time) return val + return new_f @@ -92,8 +91,8 @@ def _parse_cookies(cookie_str, dictionary): :param dictionary: A dictionary to update with any found key-value pairs. :type dictionary: ``dict`` """ - parsed_cookie = six.moves.http_cookies.SimpleCookie(cookie_str) - for cookie in parsed_cookie.values(): + parsed_cookie = SimpleCookie(cookie_str) + for cookie in list(parsed_cookie.values()): dictionary[cookie.key] = cookie.coded_value @@ -114,10 +113,11 @@ def _make_cookie_header(cookies): :return: ``str` An HTTP header cookie string. 
:rtype: ``str`` """ - return "; ".join("%s=%s" % (key, value) for key, value in cookies) + return "; ".join(f"{key}={value}" for key, value in cookies) + # Singleton values to eschew None -class _NoAuthenticationToken(object): +class _NoAuthenticationToken: """The value stored in a :class:`Context` or :class:`splunklib.client.Service` class that is not logged in. @@ -129,7 +129,6 @@ class that is not logged in. Likewise, after a ``Context`` or ``Service`` object has been logged out, the token is set to this value again. """ - pass class UrlEncoded(str): @@ -155,7 +154,7 @@ class UrlEncoded(str): **Example**:: import urllib - UrlEncoded('%s://%s' % (scheme, urllib.quote(host)), skip_encode=True) + UrlEncoded(f'{scheme}://{urllib.quote(host)}', skip_encode=True) If you append ``str`` strings and ``UrlEncoded`` strings, the result is also URL encoded. @@ -165,6 +164,7 @@ class UrlEncoded(str): UrlEncoded('ab c') + 'de f' == UrlEncoded('ab cde f') 'ab c' + UrlEncoded('de f') == UrlEncoded('ab cde f') """ + def __new__(self, val='', skip_encode=False, encode_slash=False): if isinstance(val, UrlEncoded): # Don't urllib.quote something already URL encoded. @@ -172,12 +172,12 @@ def __new__(self, val='', skip_encode=False, encode_slash=False): elif skip_encode: return str.__new__(self, val) elif encode_slash: - return str.__new__(self, urllib.parse.quote_plus(val)) + return str.__new__(self, parse.quote_plus(val)) else: - # When subclassing str, just call str's __new__ method + # When subclassing str, just call str.__new__ method # with your class and the value you want to have in the # new string. - return str.__new__(self, urllib.parse.quote(val)) + return str.__new__(self, parse.quote(val)) def __add__(self, other): """self + other @@ -187,8 +187,8 @@ def __add__(self, other): """ if isinstance(other, UrlEncoded): return UrlEncoded(str.__add__(self, other), skip_encode=True) - else: - return UrlEncoded(str.__add__(self, urllib.parse.quote(other)), skip_encode=True) + + return UrlEncoded(str.__add__(self, parse.quote(other)), skip_encode=True) def __radd__(self, other): """other + self @@ -198,8 +198,8 @@ def __radd__(self, other): """ if isinstance(other, UrlEncoded): return UrlEncoded(str.__radd__(self, other), skip_encode=True) - else: - return UrlEncoded(str.__add__(urllib.parse.quote(other), self), skip_encode=True) + + return UrlEncoded(str.__add__(parse.quote(other), self), skip_encode=True) def __mod__(self, fields): """Interpolation into ``UrlEncoded``s is disabled. @@ -208,15 +208,17 @@ def __mod__(self, fields): ``TypeError``. """ raise TypeError("Cannot interpolate into a UrlEncoded object.") + def __repr__(self): - return "UrlEncoded(%s)" % repr(urllib.parse.unquote(str(self))) + return f"UrlEncoded({repr(parse.unquote(str(self)))})" + @contextmanager def _handle_auth_error(msg): - """Handle reraising HTTP authentication errors as something clearer. + """Handle re-raising HTTP authentication errors as something clearer. If an ``HTTPError`` is raised with status 401 (access denied) in - the body of this context manager, reraise it as an + the body of this context manager, re-raise it as an ``AuthenticationError`` instead, with *msg* as its message. This function adds no round trips to the server. @@ -237,6 +239,7 @@ def _handle_auth_error(msg): else: raise + def _authentication(request_fun): """Decorator to handle autologin and authentication errors. 
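# [Reviewer sketch - not part of the patch] A minimal check of the UrlEncoded
# arithmetic documented in the hunk above, now backed by urllib.parse instead
# of six.moves.urllib. Assumes the splunklib package from this branch is
# importable.
from urllib import parse

from splunklib.binding import UrlEncoded

# Plain strings are quoted on construction and when concatenated.
assert str(UrlEncoded('ab c')) == 'ab%20c'
assert UrlEncoded('ab c') + 'de f' == UrlEncoded('ab cde f')
assert 'ab c' + UrlEncoded('de f') == UrlEncoded('ab cde f')

# Values that are already encoded can be passed through untouched.
assert str(UrlEncoded(parse.quote('ab c'), skip_encode=True)) == 'ab%20c'

# Interpolation is still rejected, as __mod__ above guarantees.
try:
    UrlEncoded('count=%s') % (10,)
except TypeError:
    pass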
@@ -271,10 +274,10 @@ def f(): return 42 print _authentication(f) """ + @wraps(request_fun) def wrapper(self, *args, **kwargs): - if self.token is _NoAuthenticationToken and \ - not self.has_cookies(): + if self.token is _NoAuthenticationToken and not self.has_cookies(): # Not yet logged in. if self.autologin and self.username and self.password: # This will throw an uncaught @@ -296,8 +299,8 @@ def wrapper(self, *args, **kwargs): # an AuthenticationError and give up. with _handle_auth_error("Autologin failed."): self.login() - with _handle_auth_error( - "Authentication Failed! If session token is used, it seems to have been expired."): + with _handle_auth_error("Autologin succeeded, but there was an auth error on next request. Something " + "is very wrong."): return request_fun(self, *args, **kwargs) elif he.status == 401 and not self.autologin: raise AuthenticationError( @@ -347,10 +350,10 @@ def _authority(scheme=DEFAULT_SCHEME, host=DEFAULT_HOST, port=DEFAULT_PORT): """ if ':' in host: - # IPv6 addresses must be enclosed in [ ] in order to be well - # formed. + # IPv6 addresses must be enclosed in [ ] in order to be well-formed. host = '[' + host + ']' - return UrlEncoded("%s://%s:%s" % (scheme, host, port), skip_encode=True) + return UrlEncoded(f"{scheme}://{host}:{port}", skip_encode=True) + # kwargs: sharing, owner, app def namespace(sharing=None, owner=None, app=None, **kwargs): @@ -405,7 +408,7 @@ def namespace(sharing=None, owner=None, app=None, **kwargs): n = binding.namespace(sharing="global", app="search") """ if sharing in ["system"]: - return record({'sharing': sharing, 'owner': "nobody", 'app': "system" }) + return record({'sharing': sharing, 'owner': "nobody", 'app': "system"}) if sharing in ["global", "app"]: return record({'sharing': sharing, 'owner': "nobody", 'app': app}) if sharing in ["user", None]: @@ -413,7 +416,7 @@ def namespace(sharing=None, owner=None, app=None, **kwargs): raise ValueError("Invalid value for argument: 'sharing'") -class Context(object): +class Context: """This class represents a context that encapsulates a splunkd connection. The ``Context`` class encapsulates the details of HTTP requests, @@ -432,7 +435,7 @@ class Context(object): :type port: ``integer`` :param scheme: The scheme for accessing the service (the default is "https"). :type scheme: "https" or "http" - :param verify: Enable (True) or disable (False) SSL verrification for https connections. + :param verify: Enable (True) or disable (False) SSL verification for https connections. :type verify: ``Boolean`` :param sharing: The sharing mode for the namespace (the default is "user"). 
:type sharing: "global", "system", "app", or "user" @@ -475,12 +478,14 @@ class Context(object): # Or if you already have a valid cookie c = binding.Context(cookie="splunkd_8089=...") """ + def __init__(self, handler=None, **kwargs): self.http = HttpLib(handler, kwargs.get("verify", False), key_file=kwargs.get("key_file"), - cert_file=kwargs.get("cert_file"), context=kwargs.get("context"), # Default to False for backward compat + cert_file=kwargs.get("cert_file"), context=kwargs.get("context"), + # Default to False for backward compat retries=kwargs.get("retries", 0), retryDelay=kwargs.get("retryDelay", 10)) self.token = kwargs.get("token", _NoAuthenticationToken) - if self.token is None: # In case someone explicitly passes token=None + if self.token is None: # In case someone explicitly passes token=None self.token = _NoAuthenticationToken self.scheme = kwargs.get("scheme", DEFAULT_SCHEME) self.host = kwargs.get("host", DEFAULT_HOST) @@ -513,7 +518,7 @@ def has_cookies(self): :rtype: ``bool`` """ auth_token_key = "splunkd_" - return any(auth_token_key in key for key in self.get_cookies().keys()) + return any(auth_token_key in key for key in list(self.get_cookies().keys())) # Shared per-context request headers @property @@ -529,10 +534,11 @@ def _auth_headers(self): if self.has_cookies(): return [("Cookie", _make_cookie_header(list(self.get_cookies().items())))] elif self.basic and (self.username and self.password): - token = 'Basic %s' % b64encode(("%s:%s" % (self.username, self.password)).encode('utf-8')).decode('ascii') + encoded_username_password = b64encode(f"{self.username}:{self.password}".encode('utf-8')).decode('ascii') + token = f'Basic {encoded_username_password}' return [("Authorization", token)] elif self.bearerToken: - token = 'Bearer %s' % self.bearerToken + token = f"Bearer {self.bearerToken}" return [("Authorization", token)] elif self.token is _NoAuthenticationToken: return [] @@ -541,7 +547,7 @@ def _auth_headers(self): if self.token.startswith('Splunk '): token = self.token else: - token = 'Splunk %s' % self.token + token = f"Splunk {self.token}" return [("Authorization", token)] def connect(self): @@ -834,12 +840,12 @@ def request(self, path_segment, method="GET", headers=None, body={}, headers = [] path = self.authority \ - + self._abspath(path_segment, owner=owner, - app=app, sharing=sharing) + + self._abspath(path_segment, owner=owner, + app=app, sharing=sharing) all_headers = headers + self.additional_headers + self._auth_headers logger.debug("%s request to %s (headers: %s, body: %s)", - method, path, str(all_headers), repr(body)) + method, path, str(all_headers), repr(body)) if body: body = _encode(**body) @@ -881,14 +887,14 @@ def login(self): """ if self.has_cookies() and \ - (not self.username and not self.password): + (not self.username and not self.password): # If we were passed session cookie(s), but no username or # password, then login is a nop, since we're automatically # logged in. return if self.token is not _NoAuthenticationToken and \ - (not self.username and not self.password): + (not self.username and not self.password): # If we were passed a session token, but no username or # password, then login is a nop, since we're automatically # logged in. 
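# [Reviewer sketch - not part of the patch] The Basic-auth branch of
# Context._auth_headers above, reduced to a standalone example of the new
# f-string + b64encode construction (credentials here are made up).
from base64 import b64encode

username, password = "admin", "changeme"
encoded_username_password = b64encode(f"{username}:{password}".encode('utf-8')).decode('ascii')
header = ("Authorization", f"Basic {encoded_username_password}")
assert header == ("Authorization", "Basic YWRtaW46Y2hhbmdlbWU=")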
@@ -910,11 +916,11 @@ def login(self): username=self.username, password=self.password, headers=self.additional_headers, - cookie="1") # In Splunk 6.2+, passing "cookie=1" will return the "set-cookie" header + cookie="1") # In Splunk 6.2+, passing "cookie=1" will return the "set-cookie" header body = response.body.read() session = XML(body).findtext("./sessionKey") - self.token = "Splunk %s" % session + self.token = f"Splunk {session}" return self except HTTPError as he: if he.status == 401: @@ -929,7 +935,7 @@ def logout(self): return self def _abspath(self, path_segment, - owner=None, app=None, sharing=None): + owner=None, app=None, sharing=None): """Qualifies *path_segment* into an absolute path for a URL. If *path_segment* is already absolute, returns it unchanged. @@ -981,12 +987,11 @@ def _abspath(self, path_segment, # namespace. If only one of app and owner is specified, use # '-' for the other. if ns.app is None and ns.owner is None: - return UrlEncoded("/services/%s" % path_segment, skip_encode=skip_encode) + return UrlEncoded(f"/services/{path_segment}", skip_encode=skip_encode) oname = "nobody" if ns.owner is None else ns.owner aname = "system" if ns.app is None else ns.app - path = UrlEncoded("/servicesNS/%s/%s/%s" % (oname, aname, path_segment), - skip_encode=skip_encode) + path = UrlEncoded(f"/servicesNS/{oname}/{aname}/{path_segment}", skip_encode=skip_encode) return path @@ -1037,21 +1042,23 @@ def connect(**kwargs): c.login() return c + # Note: the error response schema supports multiple messages but we only # return the first, although we do return the body so that an exception # handler that wants to read multiple messages can do so. class HTTPError(Exception): """This exception is raised for HTTP responses that return an error.""" + def __init__(self, response, _message=None): status = response.status reason = response.reason body = response.body.read() try: detail = XML(body).findtext("./messages/msg") - except ParseError as err: + except ParseError: detail = body - message = "HTTP %d %s%s" % ( - status, reason, "" if detail is None else " -- %s" % detail) + detail_formatted = "" if detail is None else f" -- {detail}" + message = f"HTTP {status} {reason}{detail_formatted}" Exception.__init__(self, _message or message) self.status = status self.reason = reason @@ -1059,6 +1066,7 @@ def __init__(self, response, _message=None): self.body = body self._response = response + class AuthenticationError(HTTPError): """Raised when a login request to Splunk fails. @@ -1066,6 +1074,7 @@ class AuthenticationError(HTTPError): in a call to :meth:`Context.login` or :meth:`splunklib.client.Service.login`, this exception is raised. """ + def __init__(self, message, cause): # Put the body back in the response so that HTTPError's constructor can # read it again. @@ -1073,6 +1082,7 @@ def __init__(self, message, cause): HTTPError.__init__(self, cause._response, message) + # # The HTTP interface used by the Splunk binding layer abstracts the underlying # HTTP library using request & response 'messages' which are implemented as @@ -1100,16 +1110,17 @@ def __init__(self, message, cause): # 'foo=1&foo=2&foo=3'. 
def _encode(**kwargs): items = [] - for key, value in six.iteritems(kwargs): + for key, value in list(kwargs.items()): if isinstance(value, list): items.extend([(key, item) for item in value]) else: items.append((key, value)) - return urllib.parse.urlencode(items) + return parse.urlencode(items) + # Crack the given url into (scheme, host, port, path) def _spliturl(url): - parsed_url = urllib.parse.urlparse(url) + parsed_url = parse.urlparse(url) host = parsed_url.hostname port = parsed_url.port path = '?'.join((parsed_url.path, parsed_url.query)) if parsed_url.query else parsed_url.path @@ -1118,9 +1129,10 @@ def _spliturl(url): if port is None: port = DEFAULT_PORT return parsed_url.scheme, host, port, path + # Given an HTTP request handler, this wrapper objects provides a related # family of convenience methods built using that handler. -class HttpLib(object): +class HttpLib: """A set of convenient methods for making HTTP calls. ``HttpLib`` provides a general :meth:`request` method, and :meth:`delete`, @@ -1162,7 +1174,9 @@ class HttpLib(object): If using the default handler, SSL verification can be disabled by passing verify=False. """ - def __init__(self, custom_handler=None, verify=False, key_file=None, cert_file=None, context=None, retries=0, retryDelay=10): + + def __init__(self, custom_handler=None, verify=False, key_file=None, cert_file=None, context=None, retries=0, + retryDelay=10): if custom_handler is None: self.handler = handler(verify=verify, key_file=key_file, cert_file=cert_file, context=context) else: @@ -1223,7 +1237,7 @@ def get(self, url, headers=None, **kwargs): # the query to be encoded or it will get automatically URL # encoded by being appended to url. url = url + UrlEncoded('?' + _encode(**kwargs), skip_encode=True) - return self.request(url, { 'method': "GET", 'headers': headers }) + return self.request(url, {'method': "GET", 'headers': headers}) def post(self, url, headers=None, **kwargs): """Sends a POST request to a URL. @@ -1319,6 +1333,7 @@ class ResponseReader(io.RawIOBase): types of HTTP libraries used with this SDK. This class also provides a preview of the stream and a few useful predicates. """ + # For testing, you can use a StringIO as the argument to # ``ResponseReader`` instead of an ``httplib.HTTPResponse``. It # will work equally well. @@ -1328,10 +1343,7 @@ def __init__(self, response, connection=None): self._buffer = b'' def __str__(self): - if six.PY2: - return self.read() - else: - return str(self.read(), 'UTF-8') + return str(self.read(), 'UTF-8') @property def empty(self): @@ -1357,7 +1369,7 @@ def close(self): self._connection.close() self._response.close() - def read(self, size = None): + def read(self, size=None): """Reads a given number of characters from the response. 
:param size: The number of characters to read, or "None" to read the @@ -1410,7 +1422,7 @@ def connect(scheme, host, port): kwargs = {} if timeout is not None: kwargs['timeout'] = timeout if scheme == "http": - return six.moves.http_client.HTTPConnection(host, port, **kwargs) + return client.HTTPConnection(host, port, **kwargs) if scheme == "https": if key_file is not None: kwargs['key_file'] = key_file if cert_file is not None: kwargs['cert_file'] = cert_file @@ -1421,8 +1433,8 @@ def connect(scheme, host, port): # verify is True in elif branch and context is not None kwargs['context'] = context - return six.moves.http_client.HTTPSConnection(host, port, **kwargs) - raise ValueError("unsupported scheme: %s" % scheme) + return client.HTTPSConnection(host, port, **kwargs) + raise ValueError(f"unsupported scheme: {scheme}") def request(url, message, **kwargs): scheme, host, port, path = _spliturl(url) @@ -1433,7 +1445,7 @@ def request(url, message, **kwargs): "User-Agent": "splunk-sdk-python/1.6.19", "Accept": "*/*", "Connection": "Close", - } # defaults + } # defaults for key, value in message["headers"]: head[key] = value method = message.get("method", "GET") diff --git a/splunklib/client.py b/splunklib/client.py index ab276c3e..5a7d6f0f 100644 --- a/splunklib/client.py +++ b/splunklib/client.py @@ -65,15 +65,13 @@ import socket from datetime import datetime, timedelta from time import sleep +from urllib import parse -from splunklib import six -from splunklib.six.moves import urllib - -from . import data -from .binding import (AuthenticationError, Context, HTTPError, UrlEncoded, - _encode, _make_cookie_header, _NoAuthenticationToken, - namespace) -from .data import record +from splunklib import data +from splunklib.data import record +from splunklib.binding import (AuthenticationError, Context, HTTPError, UrlEncoded, + _encode, _make_cookie_header, _NoAuthenticationToken, + namespace) logger = logging.getLogger(__name__) @@ -83,7 +81,8 @@ "OperationError", "IncomparableException", "Service", - "namespace" + "namespace", + "AuthenticationError" ] PATH_APPS = "apps/local/" @@ -104,7 +103,7 @@ PATH_MODULAR_INPUTS = "data/modular-inputs" PATH_ROLES = "authorization/roles/" PATH_SAVED_SEARCHES = "saved/searches/" -PATH_STANZA = "configs/conf-%s/%s" # (file, stanza) +PATH_STANZA = "configs/conf-%s/%s" # (file, stanza) PATH_USERS = "authentication/users/" PATH_RECEIVERS_STREAM = "/services/receivers/stream" PATH_RECEIVERS_SIMPLE = "/services/receivers/simple" @@ -114,45 +113,38 @@ XNAME_ENTRY = XNAMEF_ATOM % "entry" XNAME_CONTENT = XNAMEF_ATOM % "content" -MATCH_ENTRY_CONTENT = "%s/%s/*" % (XNAME_ENTRY, XNAME_CONTENT) +MATCH_ENTRY_CONTENT = f"{XNAME_ENTRY}/{XNAME_CONTENT}/*" class IllegalOperationException(Exception): """Thrown when an operation is not possible on the Splunk instance that a :class:`Service` object is connected to.""" - pass class IncomparableException(Exception): """Thrown when trying to compare objects (using ``==``, ``<``, ``>``, and so on) of a type that doesn't support it.""" - pass class AmbiguousReferenceException(ValueError): """Thrown when the name used to fetch an entity matches more than one entity.""" - pass class InvalidNameException(Exception): """Thrown when the specified name contains characters that are not allowed in Splunk entity names.""" - pass class NoSuchCapability(Exception): """Thrown when the capability that has been referred to doesn't exist.""" - pass class OperationError(Exception): - """Raised for a failed operation, such as a time out.""" - pass + 
"""Raised for a failed operation, such as a timeout.""" class NotSupportedError(Exception): """Raised for operations that are not supported on a given object.""" - pass def _trailing(template, *targets): @@ -188,8 +180,9 @@ def _trailing(template, *targets): def _filter_content(content, *args): if len(args) > 0: return record((k, content[k]) for k in args) - return record((k, v) for k, v in six.iteritems(content) - if k not in ['eai:acl', 'eai:attributes', 'type']) + return record((k, v) for k, v in list(content.items()) + if k not in ['eai:acl', 'eai:attributes', 'type']) + # Construct a resource path from the given base path + resource name def _path(base, name): @@ -248,7 +241,7 @@ def _parse_atom_entry(entry): metadata = _parse_atom_metadata(content) # Filter some of the noise out of the content record - content = record((k, v) for k, v in six.iteritems(content) + content = record((k, v) for k, v in list(content.items()) if k not in ['eai:acl', 'eai:attributes']) if 'type' in content: @@ -287,6 +280,7 @@ def _parse_atom_metadata(content): return record({'access': access, 'fields': fields}) + # kwargs: scheme, host, port, app, owner, username, password def connect(**kwargs): """This function connects and logs in to a Splunk instance. @@ -415,8 +409,9 @@ class Service(_BaseService): # Or if you already have a valid cookie s = client.Service(cookie="splunkd_8089=...") """ + def __init__(self, **kwargs): - super(Service, self).__init__(**kwargs) + super().__init__(**kwargs) self._splunk_version = None self._kvstore_owner = None @@ -584,7 +579,7 @@ def restart(self, timeout=None): :param timeout: A timeout period, in seconds. :type timeout: ``integer`` """ - msg = { "value": "Restart requested by " + self.username + "via the Splunk SDK for Python"} + msg = {"value": "Restart requested by " + self.username + "via the Splunk SDK for Python"} # This message will be deleted once the server actually restarts. self.messages.create(name="restart_required", **msg) result = self.post("/services/server/control/restart") @@ -708,7 +703,6 @@ def kvstore_owner(self, value): kvstore is refreshed, when the owner value is changed """ self._kvstore_owner = value - self.kvstore @property def kvstore(self): @@ -730,13 +724,14 @@ def users(self): return Users(self) -class Endpoint(object): +class Endpoint: """This class represents individual Splunk resources in the Splunk REST API. An ``Endpoint`` object represents a URI, such as ``/services/saved/searches``. This class provides the common functionality of :class:`Collection` and :class:`Entity` (essentially HTTP GET and POST methods). """ + def __init__(self, service, path): self.service = service self.path = path @@ -956,14 +951,12 @@ def __eq__(self, other): but then ``x != saved_searches['asearch']``. whether or not there was a change on the server. Rather than - try to do something fancy, we simple declare that equality is + try to do something fancy, we simply declare that equality is undefined for Entities. Makes no roundtrips to the server. """ - raise IncomparableException( - "Equality is undefined for objects of class %s" % \ - self.__class__.__name__) + raise IncomparableException(f"Equality is undefined for objects of class {self.__class__.__name__}") def __getattr__(self, key): # Called when an attribute was not found by the normal method. 
In this @@ -989,7 +982,7 @@ def _load_atom_entry(self, response): apps = [ele.entry.content.get('eai:appName') for ele in elem] raise AmbiguousReferenceException( - "Fetch from server returned multiple entries for name '%s' in apps %s." % (elem[0].entry.title, apps)) + f"Fetch from server returned multiple entries for name '{elem[0].entry.title}' in apps {apps}.") else: return elem.entry @@ -1024,7 +1017,7 @@ def _proper_namespace(self, owner=None, app=None, sharing=None): :param sharing: :return: """ - if owner is None and app is None and sharing is None: # No namespace provided + if owner is None and app is None and sharing is None: # No namespace provided if self._state is not None and 'access' in self._state: return (self._state.access.owner, self._state.access.app, @@ -1034,7 +1027,7 @@ def _proper_namespace(self, owner=None, app=None, sharing=None): self.service.namespace['app'], self.service.namespace['sharing']) else: - return (owner,app,sharing) + return owner, app, sharing def delete(self): owner, app, sharing = self._proper_namespace() @@ -1042,11 +1035,11 @@ def delete(self): def get(self, path_segment="", owner=None, app=None, sharing=None, **query): owner, app, sharing = self._proper_namespace(owner, app, sharing) - return super(Entity, self).get(path_segment, owner=owner, app=app, sharing=sharing, **query) + return super().get(path_segment, owner=owner, app=app, sharing=sharing, **query) def post(self, path_segment="", owner=None, app=None, sharing=None, **query): owner, app, sharing = self._proper_namespace(owner, app, sharing) - return super(Entity, self).post(path_segment, owner=owner, app=app, sharing=sharing, **query) + return super().post(path_segment, owner=owner, app=app, sharing=sharing, **query) def refresh(self, state=None): """Refreshes the state of this entity. @@ -1137,7 +1130,7 @@ def read(self, response): # text to be dispatched via HTTP. However, these links are already # URL encoded when they arrive, and we need to mark them as such. unquoted_links = dict([(k, UrlEncoded(v, skip_encode=True)) - for k,v in six.iteritems(results['links'])]) + for k, v in list(results['links'].items())]) results['links'] = unquoted_links return results @@ -1182,7 +1175,7 @@ def update(self, **kwargs): """ # The peculiarity in question: the REST API creates a new # Entity if we pass name in the dictionary, instead of the - # expected behavior of updating this Entity. Therefore we + # expected behavior of updating this Entity. Therefore, we # check for 'name' in kwargs and throw an error if it is # there. if 'name' in kwargs: @@ -1195,9 +1188,10 @@ class ReadOnlyCollection(Endpoint): """This class represents a read-only collection of entities in the Splunk instance. """ + def __init__(self, service, path, item=Entity): Endpoint.__init__(self, service, path) - self.item = item # Item accessor + self.item = item # Item accessor self.null_count = -1 def __contains__(self, name): @@ -1229,7 +1223,7 @@ def __getitem__(self, key): name. Where there is no conflict, ``__getitem__`` will fetch the - entity given just the name. If there is a conflict and you + entity given just the name. If there is a conflict, and you pass just a name, it will raise a ``ValueError``. In that case, add the namespace as a second argument. @@ -1276,13 +1270,14 @@ def __getitem__(self, key): response = self.get(key) entries = self._load_list(response) if len(entries) > 1: - raise AmbiguousReferenceException("Found multiple entities named '%s'; please specify a namespace." 
% key) + raise AmbiguousReferenceException( + f"Found multiple entities named '{key}'; please specify a namespace.") elif len(entries) == 0: raise KeyError(key) else: return entries[0] except HTTPError as he: - if he.status == 404: # No entity matching key and namespace. + if he.status == 404: # No entity matching key and namespace. raise KeyError(key) else: raise @@ -1346,7 +1341,7 @@ def _entity_path(self, state): # This has been factored out so that it can be easily # overloaded by Configurations, which has to switch its # entities' endpoints from its own properties/ to configs/. - raw_path = urllib.parse.unquote(state.links.alternate) + raw_path = parse.unquote(state.links.alternate) if 'servicesNS/' in raw_path: return _trailing(raw_path, 'servicesNS/', '/', '/') elif 'services/' in raw_path: @@ -1515,8 +1510,6 @@ def list(self, count=None, **kwargs): return list(self.iter(count=count, **kwargs)) - - class Collection(ReadOnlyCollection): """A collection of entities. @@ -1590,8 +1583,8 @@ def create(self, name, **params): applications = s.apps new_app = applications.create("my_fake_app") """ - if not isinstance(name, six.string_types): - raise InvalidNameException("%s is not a valid name for an entity." % name) + if not isinstance(name, str): + raise InvalidNameException(f"{name} is not a valid name for an entity.") if 'namespace' in params: namespace = params.pop('namespace') params['owner'] = namespace.owner @@ -1650,7 +1643,7 @@ def delete(self, name, **params): # has already been deleted, and we reraise it as a # KeyError. if he.status == 404: - raise KeyError("No such entity %s" % name) + raise KeyError(f"No such entity {name}") else: raise return self @@ -1701,14 +1694,13 @@ def get(self, name="", owner=None, app=None, sharing=None, **query): """ name = UrlEncoded(name, encode_slash=True) - return super(Collection, self).get(name, owner, app, sharing, **query) - - + return super().get(name, owner, app, sharing, **query) class ConfigurationFile(Collection): """This class contains all of the stanzas from one configuration file. """ + # __init__'s arguments must match those of an Entity, not a # Collection, since it is being created as the elements of a # Configurations, which is a Collection subclass. @@ -1725,6 +1717,7 @@ class Configurations(Collection): stanzas. This collection is unusual in that the values in it are themselves collections of :class:`ConfigurationFile` objects. """ + def __init__(self, service): Collection.__init__(self, service, PATH_PROPERTIES, item=ConfigurationFile) if self.service.namespace.owner == '-' or self.service.namespace.app == '-': @@ -1742,7 +1735,7 @@ def __getitem__(self, key): response = self.get(key) return ConfigurationFile(self.service, PATH_CONF % key, state={'title': key}) except HTTPError as he: - if he.status == 404: # No entity matching key + if he.status == 404: # No entity matching key raise KeyError(key) else: raise @@ -1754,7 +1747,7 @@ def __contains__(self, key): response = self.get(key) return True except HTTPError as he: - if he.status == 404: # No entity matching key + if he.status == 404: # No entity matching key return False else: raise @@ -1773,15 +1766,15 @@ def create(self, name): # This has to be overridden to handle the plumbing of creating # a ConfigurationFile (which is a Collection) instead of some # Entity. 
- if not isinstance(name, six.string_types): - raise ValueError("Invalid name: %s" % repr(name)) + if not isinstance(name, str): + raise ValueError(f"Invalid name: {repr(name)}") response = self.post(__conf=name) if response.status == 303: return self[name] elif response.status == 201: return ConfigurationFile(self.service, PATH_CONF % name, item=Stanza, state={'title': name}) else: - raise ValueError("Unexpected status code %s returned from creating a stanza" % response.status) + raise ValueError(f"Unexpected status code {response.status} returned from creating a stanza") def delete(self, key): """Raises `IllegalOperationException`.""" @@ -1813,17 +1806,18 @@ def __len__(self): # The stanza endpoint returns all the keys at the same level in the XML as the eai information # and 'disabled', so to get an accurate length, we have to filter those out and have just # the stanza keys. - return len([x for x in self._state.content.keys() + return len([x for x in list(self._state.content.keys()) if not x.startswith('eai') and x != 'disabled']) class StoragePassword(Entity): """This class contains a storage password. """ + def __init__(self, service, path, **kwargs): state = kwargs.get('state', None) kwargs['skip_refresh'] = kwargs.get('skip_refresh', state is not None) - super(StoragePassword, self).__init__(service, path, **kwargs) + super().__init__(service, path, **kwargs) self._state = state @property @@ -1847,10 +1841,11 @@ class StoragePasswords(Collection): """This class provides access to the storage passwords from this Splunk instance. Retrieve this collection using :meth:`Service.storage_passwords`. """ + def __init__(self, service): if service.namespace.owner == '-' or service.namespace.app == '-': raise ValueError("StoragePasswords cannot have wildcards in namespace.") - super(StoragePasswords, self).__init__(service, PATH_STORAGE_PASSWORDS, item=StoragePassword) + super().__init__(service, PATH_STORAGE_PASSWORDS, item=StoragePassword) def create(self, password, username, realm=None): """ Creates a storage password. @@ -1867,8 +1862,8 @@ def create(self, password, username, realm=None): :return: The :class:`StoragePassword` object created. """ - if not isinstance(username, six.string_types): - raise ValueError("Invalid name: %s" % repr(username)) + if not isinstance(username, str): + raise ValueError(f"Invalid name: {repr(username)}") if realm is None: response = self.post(password=password, name=username) @@ -1876,7 +1871,7 @@ def create(self, password, username, realm=None): response = self.post(password=password, realm=realm, name=username) if response.status != 201: - raise ValueError("Unexpected status code %s returned from creating a stanza" % response.status) + raise ValueError(f"Unexpected status code {response.status} returned from creating a stanza") entries = _load_atom_entries(response) state = _parse_atom_entry(entries[0]) @@ -1916,6 +1911,7 @@ def delete(self, username, realm=None): class AlertGroup(Entity): """This class represents a group of fired alerts for a saved search. Access it using the :meth:`alerts` property.""" + def __init__(self, service, path, **kwargs): Entity.__init__(self, service, path, **kwargs) @@ -1944,6 +1940,7 @@ class Indexes(Collection): """This class contains the collection of indexes in this Splunk instance. Retrieve this collection using :meth:`Service.indexes`. """ + def get_default(self): """ Returns the name of the default index. 
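# [Reviewer sketch - not part of the patch] Usage sketch for the
# StoragePasswords hunks above; host and credentials are placeholders, and a
# reachable splunkd instance is assumed.
import splunklib.client as client

service = client.connect(host="localhost", port=8089,
                         username="admin", password="changeme")

# create() keeps its (password, username, realm=None) signature; the check
# above now uses isinstance(username, str) instead of six.string_types.
sp = service.storage_passwords.create("s3cr3t-value", "api_user", realm="myrealm")

for item in service.storage_passwords.list():
    print(item.name)

# A non-str username fails fast with the f-string ValueError shown above.
try:
    service.storage_passwords.create("s3cr3t-value", username=42)
except ValueError as err:
    print(err)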
@@ -1971,6 +1968,7 @@ def delete(self, name): class Index(Entity): """This class represents an index and provides different operations, such as cleaning the index, writing to the index, and so forth.""" + def __init__(self, service, path, **kwargs): Entity.__init__(self, service, path, **kwargs) @@ -1987,26 +1985,26 @@ def attach(self, host=None, source=None, sourcetype=None): :return: A writable socket. """ - args = { 'index': self.name } + args = {'index': self.name} if host is not None: args['host'] = host if source is not None: args['source'] = source if sourcetype is not None: args['sourcetype'] = sourcetype - path = UrlEncoded(PATH_RECEIVERS_STREAM + "?" + urllib.parse.urlencode(args), skip_encode=True) + path = UrlEncoded(PATH_RECEIVERS_STREAM + "?" + parse.urlencode(args), skip_encode=True) - cookie_or_auth_header = "Authorization: Splunk %s\r\n" % \ - (self.service.token if self.service.token is _NoAuthenticationToken - else self.service.token.replace("Splunk ", "")) + cookie_header = self.service.token if self.service.token is _NoAuthenticationToken else self.service.token.replace("Splunk ", "") + cookie_or_auth_header = f"Authorization: Splunk {cookie_header}\r\n" # If we have cookie(s), use them instead of "Authorization: ..." if self.service.has_cookies(): - cookie_or_auth_header = "Cookie: %s\r\n" % _make_cookie_header(self.service.get_cookies().items()) + cookie_header = _make_cookie_header(list(self.service.get_cookies().items())) + cookie_or_auth_header = f"Cookie: {cookie_header}\r\n" # Since we need to stream to the index connection, we have to keep # the connection open and use the Splunk extension headers to note # the input mode sock = self.service.connect() - headers = [("POST %s HTTP/1.1\r\n" % str(self.service._abspath(path))).encode('utf-8'), - ("Host: %s:%s\r\n" % (self.service.host, int(self.service.port))).encode('utf-8'), + headers = [f"POST {str(self.service._abspath(path))} HTTP/1.1\r\n".encode('utf-8'), + f"Host: {self.service.host}:{int(self.service.port)}\r\n".encode('utf-8'), b"Accept-Encoding: identity\r\n", cookie_or_auth_header.encode('utf-8'), b"X-Splunk-Input-Mode: Streaming\r\n", @@ -2068,8 +2066,7 @@ def clean(self, timeout=60): ftp = self['frozenTimePeriodInSecs'] was_disabled_initially = self.disabled try: - if (not was_disabled_initially and \ - self.service.splunk_version < (5,)): + if not was_disabled_initially and self.service.splunk_version < (5,): # Need to disable the index first on Splunk 4.x, # but it doesn't work to disable it on 5.0. self.disable() @@ -2079,17 +2076,17 @@ def clean(self, timeout=60): # Wait until event count goes to 0. start = datetime.now() diff = timedelta(seconds=timeout) - while self.content.totalEventCount != '0' and datetime.now() < start+diff: + while self.content.totalEventCount != '0' and datetime.now() < start + diff: sleep(1) self.refresh() if self.content.totalEventCount != '0': - raise OperationError("Cleaning index %s took longer than %s seconds; timing out." % (self.name, timeout)) + raise OperationError( + f"Cleaning index {self.name} took longer than {timeout} seconds; timing out.") finally: # Restore original values self.update(maxTotalDataSizeMB=tds, frozenTimePeriodInSecs=ftp) - if (not was_disabled_initially and \ - self.service.splunk_version < (5,)): + if not was_disabled_initially and self.service.splunk_version < (5,): # Re-enable the index if it was originally enabled and we messed with it. 
self.enable() @@ -2117,7 +2114,7 @@ def submit(self, event, host=None, source=None, sourcetype=None): :return: The :class:`Index`. """ - args = { 'index': self.name } + args = {'index': self.name} if host is not None: args['host'] = host if source is not None: args['source'] = source if sourcetype is not None: args['sourcetype'] = sourcetype @@ -2151,6 +2148,7 @@ class Input(Entity): typed input classes and is also used when the client does not recognize an input kind. """ + def __init__(self, service, path, kind=None, **kwargs): # kind can be omitted (in which case it is inferred from the path) # Otherwise, valid values are the paths from data/inputs ("udp", @@ -2161,7 +2159,7 @@ def __init__(self, service, path, kind=None, **kwargs): path_segments = path.split('/') i = path_segments.index('inputs') + 1 if path_segments[i] == 'tcp': - self.kind = path_segments[i] + '/' + path_segments[i+1] + self.kind = path_segments[i] + '/' + path_segments[i + 1] else: self.kind = path_segments[i] else: @@ -2187,7 +2185,7 @@ def update(self, **kwargs): # UDP and TCP inputs require special handling due to their restrictToHost # field. For all other inputs kinds, we can dispatch to the superclass method. if self.kind not in ['tcp', 'splunktcp', 'tcp/raw', 'tcp/cooked', 'udp']: - return super(Input, self).update(**kwargs) + return super().update(**kwargs) else: # The behavior of restrictToHost is inconsistent across input kinds and versions of Splunk. # In Splunk 4.x, the name of the entity is only the port, independent of the value of @@ -2209,7 +2207,7 @@ def update(self, **kwargs): to_update['restrictToHost'] = self._state.content['restrictToHost'] # Do the actual update operation. - return super(Input, self).update(**to_update) + return super().update(**to_update) # Inputs is a "kinded" collection, which is a heterogenous collection where @@ -2236,13 +2234,13 @@ def __getitem__(self, key): response = self.get(self.kindpath(kind) + "/" + key) entries = self._load_list(response) if len(entries) > 1: - raise AmbiguousReferenceException("Found multiple inputs of kind %s named %s." % (kind, key)) + raise AmbiguousReferenceException(f"Found multiple inputs of kind {kind} named {key}.") elif len(entries) == 0: raise KeyError((key, kind)) else: return entries[0] except HTTPError as he: - if he.status == 404: # No entity matching kind and key + if he.status == 404: # No entity matching kind and key raise KeyError((key, kind)) else: raise @@ -2256,20 +2254,21 @@ def __getitem__(self, key): response = self.get(kind + "/" + key) entries = self._load_list(response) if len(entries) > 1: - raise AmbiguousReferenceException("Found multiple inputs of kind %s named %s." % (kind, key)) + raise AmbiguousReferenceException(f"Found multiple inputs of kind {kind} named {key}.") elif len(entries) == 0: pass else: - if candidate is not None: # Already found at least one candidate - raise AmbiguousReferenceException("Found multiple inputs named %s, please specify a kind" % key) + if candidate is not None: # Already found at least one candidate + raise AmbiguousReferenceException( + f"Found multiple inputs named {key}, please specify a kind") candidate = entries[0] except HTTPError as he: if he.status == 404: - pass # Just carry on to the next kind. + pass # Just carry on to the next kind. else: raise if candidate is None: - raise KeyError(key) # Never found a match. + raise KeyError(key) # Never found a match. 
else: return candidate @@ -2295,7 +2294,7 @@ def __contains__(self, key): pass except HTTPError as he: if he.status == 404: - pass # Just carry on to the next kind. + pass # Just carry on to the next kind. else: raise return False @@ -2347,9 +2346,8 @@ def create(self, name, kind, **kwargs): name = UrlEncoded(name, encode_slash=True) path = _path( self.path + kindpath, - '%s:%s' % (kwargs['restrictToHost'], name) \ - if 'restrictToHost' in kwargs else name - ) + f"{kwargs['restrictToHost']}:{name}" if 'restrictToHost' in kwargs else name + ) return Input(self.service, path, kind) def delete(self, name, kind=None): @@ -2419,7 +2417,7 @@ def itemmeta(self, kind): :return: The metadata. :rtype: class:``splunklib.data.Record`` """ - response = self.get("%s/_new" % self._kindmap[kind]) + response = self.get(f"{self._kindmap[kind]}/_new") content = _load_atom(response, MATCH_ENTRY_CONTENT) return _parse_atom_metadata(content) @@ -2434,7 +2432,7 @@ def _get_kind_list(self, subpath=None): this_subpath = subpath + [entry.title] # The "all" endpoint doesn't work yet. # The "tcp/ssl" endpoint is not a real input collection. - if entry.title == 'all' or this_subpath == ['tcp','ssl']: + if entry.title == 'all' or this_subpath == ['tcp', 'ssl']: continue elif 'create' in [x.rel for x in entry.link]: path = '/'.join(subpath + [entry.title]) @@ -2556,18 +2554,18 @@ def list(self, *kinds, **kwargs): path = UrlEncoded(path, skip_encode=True) response = self.get(path, **kwargs) except HTTPError as he: - if he.status == 404: # No inputs of this kind + if he.status == 404: # No inputs of this kind return [] entities = [] entries = _load_atom_entries(response) if entries is None: - return [] # No inputs in a collection comes back with no feed or entry in the XML + return [] # No inputs in a collection comes back with no feed or entry in the XML for entry in entries: state = _parse_atom_entry(entry) # Unquote the URL, since all URL encoded in the SDK # should be of type UrlEncoded, and all str should not # be URL encoded. - path = urllib.parse.unquote(state.links.alternate) + path = parse.unquote(state.links.alternate) entity = Input(self.service, path, kind, state=state) entities.append(entity) return entities @@ -2582,18 +2580,18 @@ def list(self, *kinds, **kwargs): response = self.get(self.kindpath(kind), search=search) except HTTPError as e: if e.status == 404: - continue # No inputs of this kind + continue # No inputs of this kind else: raise entries = _load_atom_entries(response) - if entries is None: continue # No inputs to process + if entries is None: continue # No inputs to process for entry in entries: state = _parse_atom_entry(entry) # Unquote the URL, since all URL encoded in the SDK # should be of type UrlEncoded, and all str should not # be URL encoded. - path = urllib.parse.unquote(state.links.alternate) + path = parse.unquote(state.links.alternate) entity = Input(self.service, path, kind, state=state) entities.append(entity) if 'offset' in kwargs: @@ -2661,6 +2659,7 @@ def oneshot(self, path, **kwargs): class Job(Entity): """This class represents a search job.""" + def __init__(self, service, sid, **kwargs): path = PATH_JOBS + sid Entity.__init__(self, service, path, skip_refresh=True, **kwargs) @@ -2933,6 +2932,7 @@ def unpause(self): class Jobs(Collection): """This class represents a collection of search jobs. 
Retrieve this collection using :meth:`Service.jobs`.""" + def __init__(self, service): Collection.__init__(self, service, PATH_JOBS, item=Job) # The count value to say list all the contents of this @@ -3086,6 +3086,7 @@ def oneshot(self, query, **params): class Loggers(Collection): """This class represents a collection of service logging categories. Retrieve this collection using :meth:`Service.loggers`.""" + def __init__(self, service): Collection.__init__(self, service, PATH_LOGGER) @@ -3117,6 +3118,7 @@ class ModularInputKind(Entity): """This class contains the different types of modular inputs. Retrieve this collection using :meth:`Service.modular_input_kinds`. """ + def __contains__(self, name): args = self.state.content['endpoints']['args'] if name in args: @@ -3154,6 +3156,7 @@ def update(self, **kwargs): class SavedSearch(Entity): """This class represents a saved search.""" + def __init__(self, service, path, **kwargs): Entity.__init__(self, service, path, **kwargs) @@ -3307,6 +3310,7 @@ def unsuppress(self): class SavedSearches(Collection): """This class represents a collection of saved searches. Retrieve this collection using :meth:`Service.saved_searches`.""" + def __init__(self, service): Collection.__init__( self, service, PATH_SAVED_SEARCHES, item=SavedSearch) @@ -3331,6 +3335,7 @@ def create(self, name, search, **kwargs): class Settings(Entity): """This class represents configuration settings for a Splunk service. Retrieve this collection using :meth:`Service.settings`.""" + def __init__(self, service, **kwargs): Entity.__init__(self, service, "/services/server/settings", **kwargs) @@ -3352,6 +3357,7 @@ def update(self, **kwargs): class User(Entity): """This class represents a Splunk user. """ + @property def role_entities(self): """Returns a list of roles assigned to this user. @@ -3368,6 +3374,7 @@ class Users(Collection): """This class represents the collection of Splunk users for this instance of Splunk. Retrieve this collection using :meth:`Service.users`. """ + def __init__(self, service): Collection.__init__(self, service, PATH_USERS, item=User) @@ -3407,8 +3414,8 @@ def create(self, username, password, roles, **params): boris = users.create("boris", "securepassword", roles="user") hilda = users.create("hilda", "anotherpassword", roles=["user","power"]) """ - if not isinstance(username, six.string_types): - raise ValueError("Invalid username: %s" % str(username)) + if not isinstance(username, str): + raise ValueError(f"Invalid username: {str(username)}") username = username.lower() self.post(name=username, password=password, roles=roles, **params) # splunkd doesn't return the user in the POST response body, @@ -3418,7 +3425,7 @@ def create(self, username, password, roles, **params): state = _parse_atom_entry(entry) entity = self.item( self.service, - urllib.parse.unquote(state.links.alternate), + parse.unquote(state.links.alternate), state=state) return entity @@ -3437,6 +3444,7 @@ def delete(self, name): class Role(Entity): """This class represents a user role. """ + def grant(self, *capabilities_to_grant): """Grants additional capabilities to this role. @@ -3487,8 +3495,8 @@ def revoke(self, *capabilities_to_revoke): for c in old_capabilities: if c not in capabilities_to_revoke: new_capabilities.append(c) - if new_capabilities == []: - new_capabilities = '' # Empty lists don't get passed in the body, so we have to force an empty argument. 
+ if not new_capabilities: + new_capabilities = '' # Empty lists don't get passed in the body, so we have to force an empty argument. self.post(capabilities=new_capabilities) return self @@ -3496,6 +3504,7 @@ def revoke(self, *capabilities_to_revoke): class Roles(Collection): """This class represents the collection of roles in the Splunk instance. Retrieve this collection using :meth:`Service.roles`.""" + def __init__(self, service): return Collection.__init__(self, service, PATH_ROLES, item=Role) @@ -3530,8 +3539,8 @@ def create(self, name, **params): roles = c.roles paltry = roles.create("paltry", imported_roles="user", defaultApp="search") """ - if not isinstance(name, six.string_types): - raise ValueError("Invalid role name: %s" % str(name)) + if not isinstance(name, str): + raise ValueError(f"Invalid role name: {str(name)}") name = name.lower() self.post(name=name, **params) # splunkd doesn't return the user in the POST response body, @@ -3541,7 +3550,7 @@ def create(self, name, **params): state = _parse_atom_entry(entry) entity = self.item( self.service, - urllib.parse.unquote(state.links.alternate), + parse.unquote(state.links.alternate), state=state) return entity @@ -3558,6 +3567,7 @@ def delete(self, name): class Application(Entity): """Represents a locally-installed Splunk app.""" + @property def setupInfo(self): """Returns the setup information for the app. @@ -3574,11 +3584,12 @@ def updateInfo(self): """Returns any update information that is available for the app.""" return self._run_action("update") + class KVStoreCollections(Collection): def __init__(self, service): Collection.__init__(self, service, 'storage/collections/config', item=KVStoreCollection) - def create(self, name, indexes = {}, fields = {}, **kwargs): + def create(self, name, indexes={}, fields={}, **kwargs): """Creates a KV Store Collection. :param name: name of collection to create @@ -3592,14 +3603,15 @@ def create(self, name, indexes = {}, fields = {}, **kwargs): :return: Result of POST request """ - for k, v in six.iteritems(indexes): + for k, v in list(indexes.items()): if isinstance(v, dict): v = json.dumps(v) kwargs['index.' + k] = v - for k, v in six.iteritems(fields): + for k, v in list(fields.items()): kwargs['field.' + k] = v return self.post(name=name, **kwargs) + class KVStoreCollection(Entity): @property def data(self): @@ -3620,7 +3632,7 @@ def update_index(self, name, value): :return: Result of POST request """ kwargs = {} - kwargs['index.' + name] = value if isinstance(value, six.string_types) else json.dumps(value) + kwargs['index.' + name] = value if isinstance(value, str) else json.dumps(value) return self.post(**kwargs) def update_field(self, name, value): @@ -3637,7 +3649,8 @@ def update_field(self, name, value): kwargs['field.' + name] = value return self.post(**kwargs) -class KVStoreCollectionData(object): + +class KVStoreCollectionData: """This class represents the data endpoint for a KVStoreCollection. 
Retrieve using :meth:`KVStoreCollection.data` @@ -3670,7 +3683,7 @@ def query(self, **query): :rtype: ``array`` """ - for key, value in query.items(): + for key, value in list(query.items()): if isinstance(query[key], dict): query[key] = json.dumps(value) @@ -3700,7 +3713,8 @@ def insert(self, data): """ if isinstance(data, dict): data = json.dumps(data) - return json.loads(self._post('', headers=KVStoreCollectionData.JSON_HEADER, body=data).body.read().decode('utf-8')) + return json.loads( + self._post('', headers=KVStoreCollectionData.JSON_HEADER, body=data).body.read().decode('utf-8')) def delete(self, query=None): """ @@ -3738,7 +3752,8 @@ def update(self, id, data): """ if isinstance(data, dict): data = json.dumps(data) - return json.loads(self._post(UrlEncoded(str(id), encode_slash=True), headers=KVStoreCollectionData.JSON_HEADER, body=data).body.read().decode('utf-8')) + return json.loads(self._post(UrlEncoded(str(id), encode_slash=True), headers=KVStoreCollectionData.JSON_HEADER, + body=data).body.read().decode('utf-8')) def batch_find(self, *dbqueries): """ @@ -3755,7 +3770,8 @@ def batch_find(self, *dbqueries): data = json.dumps(dbqueries) - return json.loads(self._post('batch_find', headers=KVStoreCollectionData.JSON_HEADER, body=data).body.read().decode('utf-8')) + return json.loads( + self._post('batch_find', headers=KVStoreCollectionData.JSON_HEADER, body=data).body.read().decode('utf-8')) def batch_save(self, *documents): """ @@ -3772,4 +3788,5 @@ def batch_save(self, *documents): data = json.dumps(documents) - return json.loads(self._post('batch_save', headers=KVStoreCollectionData.JSON_HEADER, body=data).body.read().decode('utf-8')) + return json.loads( + self._post('batch_save', headers=KVStoreCollectionData.JSON_HEADER, body=data).body.read().decode('utf-8')) diff --git a/splunklib/data.py b/splunklib/data.py index f9ffb869..14e8a793 100644 --- a/splunklib/data.py +++ b/splunklib/data.py @@ -12,16 +12,13 @@ # License for the specific language governing permissions and limitations # under the License. -"""The **splunklib.data** module reads the responses from splunkd in Atom Feed +"""The **splunklib.data** module reads the responses from splunkd in Atom Feed format, which is the format used by most of the REST API. """ -from __future__ import absolute_import -import sys from xml.etree.ElementTree import XML -from splunklib import six -__all__ = ["load"] +__all__ = ["load", "record"] # LNAME refers to element names without namespaces; XNAME is the same # name, but with an XML namespace. @@ -36,33 +33,41 @@ XNAME_KEY = XNAMEF_REST % LNAME_KEY XNAME_LIST = XNAMEF_REST % LNAME_LIST + # Some responses don't use namespaces (eg: search/parse) so we look for # both the extended and local versions of the following names. 
+ def isdict(name): - return name == XNAME_DICT or name == LNAME_DICT + return name in (XNAME_DICT, LNAME_DICT) + def isitem(name): - return name == XNAME_ITEM or name == LNAME_ITEM + return name in (XNAME_ITEM, LNAME_ITEM) + def iskey(name): - return name == XNAME_KEY or name == LNAME_KEY + return name in (XNAME_KEY, LNAME_KEY) + def islist(name): - return name == XNAME_LIST or name == LNAME_LIST + return name in (XNAME_LIST, LNAME_LIST) + def hasattrs(element): return len(element.attrib) > 0 + def localname(xname): rcurly = xname.find('}') - return xname if rcurly == -1 else xname[rcurly+1:] + return xname if rcurly == -1 else xname[rcurly + 1:] + def load(text, match=None): - """This function reads a string that contains the XML of an Atom Feed, then - returns the - data in a native Python structure (a ``dict`` or ``list``). If you also - provide a tag name or path to match, only the matching sub-elements are + """This function reads a string that contains the XML of an Atom Feed, then + returns the + data in a native Python structure (a ``dict`` or ``list``). If you also + provide a tag name or path to match, only the matching sub-elements are loaded. :param text: The XML text to load. @@ -78,30 +83,28 @@ def load(text, match=None): 'names': {} } - # Convert to unicode encoding in only python 2 for xml parser - if(sys.version_info < (3, 0, 0) and isinstance(text, unicode)): - text = text.encode('utf-8') - root = XML(text) items = [root] if match is None else root.findall(match) count = len(items) - if count == 0: + if count == 0: return None - elif count == 1: + elif count == 1: return load_root(items[0], nametable) else: return [load_root(item, nametable) for item in items] + # Load the attributes of the given element. def load_attrs(element): if not hasattrs(element): return None attrs = record() - for key, value in six.iteritems(element.attrib): + for key, value in list(element.attrib.items()): attrs[key] = value return attrs + # Parse a element and return a Python dict -def load_dict(element, nametable = None): +def load_dict(element, nametable=None): value = record() children = list(element) for child in children: @@ -110,6 +113,7 @@ def load_dict(element, nametable = None): value[name] = load_value(child, nametable) return value + # Loads the given elements attrs & value into single merged dict. def load_elem(element, nametable=None): name = localname(element.tag) @@ -118,12 +122,12 @@ def load_elem(element, nametable=None): if attrs is None: return name, value if value is None: return name, attrs # If value is simple, merge into attrs dict using special key - if isinstance(value, six.string_types): + if isinstance(value, str): attrs["$text"] = value return name, attrs # Both attrs & value are complex, so merge the two dicts, resolving collisions. collision_keys = [] - for key, val in six.iteritems(attrs): + for key, val in list(attrs.items()): if key in value and key in collision_keys: value[key].append(val) elif key in value and key not in collision_keys: @@ -133,6 +137,7 @@ def load_elem(element, nametable=None): value[key] = val return name, value + # Parse a element and return a Python list def load_list(element, nametable=None): assert islist(element.tag) @@ -143,6 +148,7 @@ def load_list(element, nametable=None): value.append(load_value(child, nametable)) return value + # Load the given root element. 
def load_root(element, nametable=None): tag = element.tag @@ -151,6 +157,7 @@ def load_root(element, nametable=None): k, v = load_elem(element, nametable) return Record.fromkv(k, v) + # Load the children of the given element. def load_value(element, nametable=None): children = list(element) @@ -159,7 +166,7 @@ def load_value(element, nametable=None): # No children, assume a simple text value if count == 0: text = element.text - if text is None: + if text is None: return None if len(text.strip()) == 0: @@ -179,7 +186,7 @@ def load_value(element, nametable=None): # If we have seen this name before, promote the value to a list if name in value: current = value[name] - if not isinstance(current, list): + if not isinstance(current, list): value[name] = [current] value[name].append(item) else: @@ -187,23 +194,24 @@ def load_value(element, nametable=None): return value + # A generic utility that enables "dot" access to dicts class Record(dict): - """This generic utility class enables dot access to members of a Python + """This generic utility class enables dot access to members of a Python dictionary. - Any key that is also a valid Python identifier can be retrieved as a field. - So, for an instance of ``Record`` called ``r``, ``r.key`` is equivalent to - ``r['key']``. A key such as ``invalid-key`` or ``invalid.key`` cannot be - retrieved as a field, because ``-`` and ``.`` are not allowed in + Any key that is also a valid Python identifier can be retrieved as a field. + So, for an instance of ``Record`` called ``r``, ``r.key`` is equivalent to + ``r['key']``. A key such as ``invalid-key`` or ``invalid.key`` cannot be + retrieved as a field, because ``-`` and ``.`` are not allowed in identifiers. - Keys of the form ``a.b.c`` are very natural to write in Python as fields. If - a group of keys shares a prefix ending in ``.``, you can retrieve keys as a + Keys of the form ``a.b.c`` are very natural to write in Python as fields. If + a group of keys shares a prefix ending in ``.``, you can retrieve keys as a nested dictionary by calling only the prefix. For example, if ``r`` contains keys ``'foo'``, ``'bar.baz'``, and ``'bar.qux'``, ``r.bar`` returns a record - with the keys ``baz`` and ``qux``. If a key contains multiple ``.``, each - one is placed into a nested dictionary, so you can write ``r.bar.qux`` or + with the keys ``baz`` and ``qux``. If a key contains multiple ``.``, each + one is placed into a nested dictionary, so you can write ``r.bar.qux`` or ``r['bar.qux']`` interchangeably. """ sep = '.' @@ -215,7 +223,7 @@ def __call__(self, *args): def __getattr__(self, name): try: return self[name] - except KeyError: + except KeyError: raise AttributeError(name) def __delattr__(self, name): @@ -235,7 +243,7 @@ def __getitem__(self, key): return dict.__getitem__(self, key) key += self.sep result = record() - for k,v in six.iteritems(self): + for k, v in list(self.items()): if not k.startswith(key): continue suffix = k[len(key):] @@ -250,17 +258,16 @@ def __getitem__(self, key): else: result[suffix] = v if len(result) == 0: - raise KeyError("No key or prefix: %s" % key) + raise KeyError(f"No key or prefix: {key}") return result - -def record(value=None): - """This function returns a :class:`Record` instance constructed with an + +def record(value=None): + """This function returns a :class:`Record` instance constructed with an initial value that you provide. - - :param `value`: An initial record value. - :type `value`: ``dict`` + + :param value: An initial record value. 
+ :type value: ``dict`` """ if value is None: value = {} return Record(value) - diff --git a/splunklib/results.py b/splunklib/results.py index 8543ab0d..f9b976cc 100644 --- a/splunklib/results.py +++ b/splunklib/results.py @@ -32,27 +32,15 @@ print "Results are a preview: %s" % reader.is_preview """ -from __future__ import absolute_import - from io import BufferedReader, BytesIO -from splunklib import six - from splunklib.six import deprecated -try: - import xml.etree.cElementTree as et -except: - import xml.etree.ElementTree as et +import xml.etree.ElementTree as et from collections import OrderedDict from json import loads as json_loads -try: - from splunklib.six.moves import cStringIO as StringIO -except: - from splunklib.six import StringIO - __all__ = [ "ResultsReader", "Message", @@ -76,7 +64,7 @@ def __init__(self, type_, message): self.message = message def __repr__(self): - return "%s: %s" % (self.type, self.message) + return f"{self.type}: {self.message}" def __eq__(self, other): return (self.type, self.message) == (other.type, other.message) @@ -264,25 +252,7 @@ def _parse_results(self, stream): elem.clear() elif elem.tag in ('text', 'v') and event == 'end': - try: - text = "".join(elem.itertext()) - except AttributeError: - # Assume we're running in Python < 2.7, before itertext() was added - # So we'll define it here - - def __itertext(self): - tag = self.tag - if not isinstance(tag, six.string_types) and tag is not None: - return - if self.text: - yield self.text - for e in self: - for s in __itertext(e): - yield s - if e.tail: - yield e.tail - - text = "".join(__itertext(elem)) + text = "".join(elem.itertext()) values.append(text) elem.clear() @@ -298,11 +268,10 @@ def __itertext(self): # splunk that is described in __init__. if 'no element found' in pe.msg: return - else: - raise + raise -class JSONResultsReader(object): +class JSONResultsReader: """This class returns dictionaries and Splunk messages from a JSON results stream. ``JSONResultsReader`` is iterable, and returns a ``dict`` for results, or a @@ -355,6 +324,8 @@ def next(self): def _parse_results(self, stream): """Parse results and messages out of *stream*.""" + msg_type = None + text = None for line in stream.readlines(): strip_line = line.strip() if strip_line.__len__() == 0: continue diff --git a/splunklib/six.py b/splunklib/six.py deleted file mode 100644 index d13e50c9..00000000 --- a/splunklib/six.py +++ /dev/null @@ -1,993 +0,0 @@ -# Copyright (c) 2010-2020 Benjamin Peterson -# -# Permission is hereby granted, free of charge, to any person obtaining a copy -# of this software and associated documentation files (the "Software"), to deal -# in the Software without restriction, including without limitation the rights -# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -# copies of the Software, and to permit persons to whom the Software is -# furnished to do so, subject to the following conditions: -# -# The above copyright notice and this permission notice shall be included in all -# copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE -# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. - -"""Utilities for writing code that runs on Python 2 and 3""" - -from __future__ import absolute_import - -import functools -import itertools -import operator -import sys -import types - -__author__ = "Benjamin Peterson " -__version__ = "1.14.0" - - -# Useful for very coarse version differentiation. -PY2 = sys.version_info[0] == 2 -PY3 = sys.version_info[0] == 3 -PY34 = sys.version_info[0:2] >= (3, 4) - -if PY3: - string_types = str, - integer_types = int, - class_types = type, - text_type = str - binary_type = bytes - - MAXSIZE = sys.maxsize -else: - string_types = basestring, - integer_types = (int, long) - class_types = (type, types.ClassType) - text_type = unicode - binary_type = str - - if sys.platform.startswith("java"): - # Jython always uses 32 bits. - MAXSIZE = int((1 << 31) - 1) - else: - # It's possible to have sizeof(long) != sizeof(Py_ssize_t). - class X(object): - - def __len__(self): - return 1 << 31 - try: - len(X()) - except OverflowError: - # 32-bit - MAXSIZE = int((1 << 31) - 1) - else: - # 64-bit - MAXSIZE = int((1 << 63) - 1) - del X - - -def _add_doc(func, doc): - """Add documentation to a function.""" - func.__doc__ = doc - - -def _import_module(name): - """Import module, returning the module after the last dot.""" - __import__(name) - return sys.modules[name] - - -class _LazyDescr(object): - - def __init__(self, name): - self.name = name - - def __get__(self, obj, tp): - result = self._resolve() - setattr(obj, self.name, result) # Invokes __set__. - try: - # This is a bit ugly, but it avoids running this again by - # removing this descriptor. - delattr(obj.__class__, self.name) - except AttributeError: - pass - return result - - -class MovedModule(_LazyDescr): - - def __init__(self, name, old, new=None): - super(MovedModule, self).__init__(name) - if PY3: - if new is None: - new = name - self.mod = new - else: - self.mod = old - - def _resolve(self): - return _import_module(self.mod) - - def __getattr__(self, attr): - _module = self._resolve() - value = getattr(_module, attr) - setattr(self, attr, value) - return value - - -class _LazyModule(types.ModuleType): - - def __init__(self, name): - super(_LazyModule, self).__init__(name) - self.__doc__ = self.__class__.__doc__ - - def __dir__(self): - attrs = ["__doc__", "__name__"] - attrs += [attr.name for attr in self._moved_attributes] - return attrs - - # Subclasses should override this - _moved_attributes = [] - - -class MovedAttribute(_LazyDescr): - - def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None): - super(MovedAttribute, self).__init__(name) - if PY3: - if new_mod is None: - new_mod = name - self.mod = new_mod - if new_attr is None: - if old_attr is None: - new_attr = name - else: - new_attr = old_attr - self.attr = new_attr - else: - self.mod = old_mod - if old_attr is None: - old_attr = name - self.attr = old_attr - - def _resolve(self): - module = _import_module(self.mod) - return getattr(module, self.attr) - - -class _SixMetaPathImporter(object): - - """ - A meta path importer to import six.moves and its submodules. - - This class implements a PEP302 finder and loader. 
It should be compatible - with Python 2.5 and all existing versions of Python3 - """ - - def __init__(self, six_module_name): - self.name = six_module_name - self.known_modules = {} - - def _add_module(self, mod, *fullnames): - for fullname in fullnames: - self.known_modules[self.name + "." + fullname] = mod - - def _get_module(self, fullname): - return self.known_modules[self.name + "." + fullname] - - def find_module(self, fullname, path=None): - if fullname in self.known_modules: - return self - return None - - def __get_module(self, fullname): - try: - return self.known_modules[fullname] - except KeyError: - raise ImportError("This loader does not know module " + fullname) - - def load_module(self, fullname): - try: - # in case of a reload - return sys.modules[fullname] - except KeyError: - pass - mod = self.__get_module(fullname) - if isinstance(mod, MovedModule): - mod = mod._resolve() - else: - mod.__loader__ = self - sys.modules[fullname] = mod - return mod - - def is_package(self, fullname): - """ - Return true, if the named module is a package. - - We need this method to get correct spec objects with - Python 3.4 (see PEP451) - """ - return hasattr(self.__get_module(fullname), "__path__") - - def get_code(self, fullname): - """Return None - - Required, if is_package is implemented""" - self.__get_module(fullname) # eventually raises ImportError - return None - get_source = get_code # same as get_code - -_importer = _SixMetaPathImporter(__name__) - - -class _MovedItems(_LazyModule): - - """Lazy loading of moved objects""" - __path__ = [] # mark as package - - -_moved_attributes = [ - MovedAttribute("cStringIO", "cStringIO", "io", "StringIO"), - MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"), - MovedAttribute("filterfalse", "itertools", "itertools", "ifilterfalse", "filterfalse"), - MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"), - MovedAttribute("intern", "__builtin__", "sys"), - MovedAttribute("map", "itertools", "builtins", "imap", "map"), - MovedAttribute("getcwd", "os", "os", "getcwdu", "getcwd"), - MovedAttribute("getcwdb", "os", "os", "getcwd", "getcwdb"), - MovedAttribute("getoutput", "commands", "subprocess"), - MovedAttribute("range", "__builtin__", "builtins", "xrange", "range"), - MovedAttribute("reload_module", "__builtin__", "importlib" if PY34 else "imp", "reload"), - MovedAttribute("reduce", "__builtin__", "functools"), - MovedAttribute("shlex_quote", "pipes", "shlex", "quote"), - MovedAttribute("StringIO", "StringIO", "io"), - MovedAttribute("UserDict", "UserDict", "collections"), - MovedAttribute("UserList", "UserList", "collections"), - MovedAttribute("UserString", "UserString", "collections"), - MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"), - MovedAttribute("zip", "itertools", "builtins", "izip", "zip"), - MovedAttribute("zip_longest", "itertools", "itertools", "izip_longest", "zip_longest"), - MovedModule("builtins", "__builtin__"), - MovedModule("configparser", "ConfigParser"), - MovedModule("collections_abc", "collections", "collections.abc" if sys.version_info >= (3, 3) else "collections"), - MovedModule("copyreg", "copy_reg"), - MovedModule("dbm_gnu", "gdbm", "dbm.gnu"), - MovedModule("dbm_ndbm", "dbm", "dbm.ndbm"), - MovedModule("_dummy_thread", "dummy_thread", "_dummy_thread" if sys.version_info < (3, 9) else "_thread"), - MovedModule("http_cookiejar", "cookielib", "http.cookiejar"), - MovedModule("http_cookies", "Cookie", "http.cookies"), - MovedModule("html_entities", 
"htmlentitydefs", "html.entities"), - MovedModule("html_parser", "HTMLParser", "html.parser"), - MovedModule("http_client", "httplib", "http.client"), - MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"), - MovedModule("email_mime_image", "email.MIMEImage", "email.mime.image"), - MovedModule("email_mime_multipart", "email.MIMEMultipart", "email.mime.multipart"), - MovedModule("email_mime_nonmultipart", "email.MIMENonMultipart", "email.mime.nonmultipart"), - MovedModule("email_mime_text", "email.MIMEText", "email.mime.text"), - MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"), - MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"), - MovedModule("SimpleHTTPServer", "SimpleHTTPServer", "http.server"), - MovedModule("cPickle", "cPickle", "pickle"), - MovedModule("queue", "Queue"), - MovedModule("reprlib", "repr"), - MovedModule("socketserver", "SocketServer"), - MovedModule("_thread", "thread", "_thread"), - MovedModule("tkinter", "Tkinter"), - MovedModule("tkinter_dialog", "Dialog", "tkinter.dialog"), - MovedModule("tkinter_filedialog", "FileDialog", "tkinter.filedialog"), - MovedModule("tkinter_scrolledtext", "ScrolledText", "tkinter.scrolledtext"), - MovedModule("tkinter_simpledialog", "SimpleDialog", "tkinter.simpledialog"), - MovedModule("tkinter_tix", "Tix", "tkinter.tix"), - MovedModule("tkinter_ttk", "ttk", "tkinter.ttk"), - MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"), - MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"), - MovedModule("tkinter_colorchooser", "tkColorChooser", - "tkinter.colorchooser"), - MovedModule("tkinter_commondialog", "tkCommonDialog", - "tkinter.commondialog"), - MovedModule("tkinter_tkfiledialog", "tkFileDialog", "tkinter.filedialog"), - MovedModule("tkinter_font", "tkFont", "tkinter.font"), - MovedModule("tkinter_messagebox", "tkMessageBox", "tkinter.messagebox"), - MovedModule("tkinter_tksimpledialog", "tkSimpleDialog", - "tkinter.simpledialog"), - MovedModule("urllib_parse", __name__ + ".moves.urllib_parse", "urllib.parse"), - MovedModule("urllib_error", __name__ + ".moves.urllib_error", "urllib.error"), - MovedModule("urllib", __name__ + ".moves.urllib", __name__ + ".moves.urllib"), - MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"), - MovedModule("xmlrpc_client", "xmlrpclib", "xmlrpc.client"), - MovedModule("xmlrpc_server", "SimpleXMLRPCServer", "xmlrpc.server"), -] -# Add windows specific modules. -if sys.platform == "win32": - _moved_attributes += [ - MovedModule("winreg", "_winreg"), - ] - -for attr in _moved_attributes: - setattr(_MovedItems, attr.name, attr) - if isinstance(attr, MovedModule): - _importer._add_module(attr, "moves." 
+ attr.name) -del attr - -_MovedItems._moved_attributes = _moved_attributes - -moves = _MovedItems(__name__ + ".moves") -_importer._add_module(moves, "moves") - - -class Module_six_moves_urllib_parse(_LazyModule): - - """Lazy loading of moved objects in six.moves.urllib_parse""" - - -_urllib_parse_moved_attributes = [ - MovedAttribute("ParseResult", "urlparse", "urllib.parse"), - MovedAttribute("SplitResult", "urlparse", "urllib.parse"), - MovedAttribute("parse_qs", "urlparse", "urllib.parse"), - MovedAttribute("parse_qsl", "urlparse", "urllib.parse"), - MovedAttribute("urldefrag", "urlparse", "urllib.parse"), - MovedAttribute("urljoin", "urlparse", "urllib.parse"), - MovedAttribute("urlparse", "urlparse", "urllib.parse"), - MovedAttribute("urlsplit", "urlparse", "urllib.parse"), - MovedAttribute("urlunparse", "urlparse", "urllib.parse"), - MovedAttribute("urlunsplit", "urlparse", "urllib.parse"), - MovedAttribute("quote", "urllib", "urllib.parse"), - MovedAttribute("quote_plus", "urllib", "urllib.parse"), - MovedAttribute("unquote", "urllib", "urllib.parse"), - MovedAttribute("unquote_plus", "urllib", "urllib.parse"), - MovedAttribute("unquote_to_bytes", "urllib", "urllib.parse", "unquote", "unquote_to_bytes"), - MovedAttribute("urlencode", "urllib", "urllib.parse"), - MovedAttribute("splitquery", "urllib", "urllib.parse"), - MovedAttribute("splittag", "urllib", "urllib.parse"), - MovedAttribute("splituser", "urllib", "urllib.parse"), - MovedAttribute("splitvalue", "urllib", "urllib.parse"), - MovedAttribute("uses_fragment", "urlparse", "urllib.parse"), - MovedAttribute("uses_netloc", "urlparse", "urllib.parse"), - MovedAttribute("uses_params", "urlparse", "urllib.parse"), - MovedAttribute("uses_query", "urlparse", "urllib.parse"), - MovedAttribute("uses_relative", "urlparse", "urllib.parse"), -] -for attr in _urllib_parse_moved_attributes: - setattr(Module_six_moves_urllib_parse, attr.name, attr) -del attr - -Module_six_moves_urllib_parse._moved_attributes = _urllib_parse_moved_attributes - -_importer._add_module(Module_six_moves_urllib_parse(__name__ + ".moves.urllib_parse"), - "moves.urllib_parse", "moves.urllib.parse") - - -class Module_six_moves_urllib_error(_LazyModule): - - """Lazy loading of moved objects in six.moves.urllib_error""" - - -_urllib_error_moved_attributes = [ - MovedAttribute("URLError", "urllib2", "urllib.error"), - MovedAttribute("HTTPError", "urllib2", "urllib.error"), - MovedAttribute("ContentTooShortError", "urllib", "urllib.error"), -] -for attr in _urllib_error_moved_attributes: - setattr(Module_six_moves_urllib_error, attr.name, attr) -del attr - -Module_six_moves_urllib_error._moved_attributes = _urllib_error_moved_attributes - -_importer._add_module(Module_six_moves_urllib_error(__name__ + ".moves.urllib.error"), - "moves.urllib_error", "moves.urllib.error") - - -class Module_six_moves_urllib_request(_LazyModule): - - """Lazy loading of moved objects in six.moves.urllib_request""" - - -_urllib_request_moved_attributes = [ - MovedAttribute("urlopen", "urllib2", "urllib.request"), - MovedAttribute("install_opener", "urllib2", "urllib.request"), - MovedAttribute("build_opener", "urllib2", "urllib.request"), - MovedAttribute("pathname2url", "urllib", "urllib.request"), - MovedAttribute("url2pathname", "urllib", "urllib.request"), - MovedAttribute("getproxies", "urllib", "urllib.request"), - MovedAttribute("Request", "urllib2", "urllib.request"), - MovedAttribute("OpenerDirector", "urllib2", "urllib.request"), - MovedAttribute("HTTPDefaultErrorHandler", 
"urllib2", "urllib.request"), - MovedAttribute("HTTPRedirectHandler", "urllib2", "urllib.request"), - MovedAttribute("HTTPCookieProcessor", "urllib2", "urllib.request"), - MovedAttribute("ProxyHandler", "urllib2", "urllib.request"), - MovedAttribute("BaseHandler", "urllib2", "urllib.request"), - MovedAttribute("HTTPPasswordMgr", "urllib2", "urllib.request"), - MovedAttribute("HTTPPasswordMgrWithDefaultRealm", "urllib2", "urllib.request"), - MovedAttribute("AbstractBasicAuthHandler", "urllib2", "urllib.request"), - MovedAttribute("HTTPBasicAuthHandler", "urllib2", "urllib.request"), - MovedAttribute("ProxyBasicAuthHandler", "urllib2", "urllib.request"), - MovedAttribute("AbstractDigestAuthHandler", "urllib2", "urllib.request"), - MovedAttribute("HTTPDigestAuthHandler", "urllib2", "urllib.request"), - MovedAttribute("ProxyDigestAuthHandler", "urllib2", "urllib.request"), - MovedAttribute("HTTPHandler", "urllib2", "urllib.request"), - MovedAttribute("HTTPSHandler", "urllib2", "urllib.request"), - MovedAttribute("FileHandler", "urllib2", "urllib.request"), - MovedAttribute("FTPHandler", "urllib2", "urllib.request"), - MovedAttribute("CacheFTPHandler", "urllib2", "urllib.request"), - MovedAttribute("UnknownHandler", "urllib2", "urllib.request"), - MovedAttribute("HTTPErrorProcessor", "urllib2", "urllib.request"), - MovedAttribute("urlretrieve", "urllib", "urllib.request"), - MovedAttribute("urlcleanup", "urllib", "urllib.request"), - MovedAttribute("URLopener", "urllib", "urllib.request"), - MovedAttribute("FancyURLopener", "urllib", "urllib.request"), - MovedAttribute("proxy_bypass", "urllib", "urllib.request"), - MovedAttribute("parse_http_list", "urllib2", "urllib.request"), - MovedAttribute("parse_keqv_list", "urllib2", "urllib.request"), -] -for attr in _urllib_request_moved_attributes: - setattr(Module_six_moves_urllib_request, attr.name, attr) -del attr - -Module_six_moves_urllib_request._moved_attributes = _urllib_request_moved_attributes - -_importer._add_module(Module_six_moves_urllib_request(__name__ + ".moves.urllib.request"), - "moves.urllib_request", "moves.urllib.request") - - -class Module_six_moves_urllib_response(_LazyModule): - - """Lazy loading of moved objects in six.moves.urllib_response""" - - -_urllib_response_moved_attributes = [ - MovedAttribute("addbase", "urllib", "urllib.response"), - MovedAttribute("addclosehook", "urllib", "urllib.response"), - MovedAttribute("addinfo", "urllib", "urllib.response"), - MovedAttribute("addinfourl", "urllib", "urllib.response"), -] -for attr in _urllib_response_moved_attributes: - setattr(Module_six_moves_urllib_response, attr.name, attr) -del attr - -Module_six_moves_urllib_response._moved_attributes = _urllib_response_moved_attributes - -_importer._add_module(Module_six_moves_urllib_response(__name__ + ".moves.urllib.response"), - "moves.urllib_response", "moves.urllib.response") - - -class Module_six_moves_urllib_robotparser(_LazyModule): - - """Lazy loading of moved objects in six.moves.urllib_robotparser""" - - -_urllib_robotparser_moved_attributes = [ - MovedAttribute("RobotFileParser", "robotparser", "urllib.robotparser"), -] -for attr in _urllib_robotparser_moved_attributes: - setattr(Module_six_moves_urllib_robotparser, attr.name, attr) -del attr - -Module_six_moves_urllib_robotparser._moved_attributes = _urllib_robotparser_moved_attributes - -_importer._add_module(Module_six_moves_urllib_robotparser(__name__ + ".moves.urllib.robotparser"), - "moves.urllib_robotparser", "moves.urllib.robotparser") - - -class 
Module_six_moves_urllib(types.ModuleType): - - """Create a six.moves.urllib namespace that resembles the Python 3 namespace""" - __path__ = [] # mark as package - parse = _importer._get_module("moves.urllib_parse") - error = _importer._get_module("moves.urllib_error") - request = _importer._get_module("moves.urllib_request") - response = _importer._get_module("moves.urllib_response") - robotparser = _importer._get_module("moves.urllib_robotparser") - - def __dir__(self): - return ['parse', 'error', 'request', 'response', 'robotparser'] - -_importer._add_module(Module_six_moves_urllib(__name__ + ".moves.urllib"), - "moves.urllib") - - -def add_move(move): - """Add an item to six.moves.""" - setattr(_MovedItems, move.name, move) - - -def remove_move(name): - """Remove item from six.moves.""" - try: - delattr(_MovedItems, name) - except AttributeError: - try: - del moves.__dict__[name] - except KeyError: - raise AttributeError("no such move, %r" % (name,)) - - -if PY3: - _meth_func = "__func__" - _meth_self = "__self__" - - _func_closure = "__closure__" - _func_code = "__code__" - _func_defaults = "__defaults__" - _func_globals = "__globals__" -else: - _meth_func = "im_func" - _meth_self = "im_self" - - _func_closure = "func_closure" - _func_code = "func_code" - _func_defaults = "func_defaults" - _func_globals = "func_globals" - - -try: - advance_iterator = next -except NameError: - def advance_iterator(it): - return it.next() -next = advance_iterator - - -try: - callable = callable -except NameError: - def callable(obj): - return any("__call__" in klass.__dict__ for klass in type(obj).__mro__) - - -if PY3: - def get_unbound_function(unbound): - return unbound - - create_bound_method = types.MethodType - - def create_unbound_method(func, cls): - return func - - Iterator = object -else: - def get_unbound_function(unbound): - return unbound.im_func - - def create_bound_method(func, obj): - return types.MethodType(func, obj, obj.__class__) - - def create_unbound_method(func, cls): - return types.MethodType(func, None, cls) - - class Iterator(object): - - def next(self): - return type(self).__next__(self) - - callable = callable -_add_doc(get_unbound_function, - """Get the function out of a possibly unbound function""") - - -get_method_function = operator.attrgetter(_meth_func) -get_method_self = operator.attrgetter(_meth_self) -get_function_closure = operator.attrgetter(_func_closure) -get_function_code = operator.attrgetter(_func_code) -get_function_defaults = operator.attrgetter(_func_defaults) -get_function_globals = operator.attrgetter(_func_globals) - - -if PY3: - def iterkeys(d, **kw): - return iter(d.keys(**kw)) - - def itervalues(d, **kw): - return iter(d.values(**kw)) - - def iteritems(d, **kw): - return iter(d.items(**kw)) - - def iterlists(d, **kw): - return iter(d.lists(**kw)) - - viewkeys = operator.methodcaller("keys") - - viewvalues = operator.methodcaller("values") - - viewitems = operator.methodcaller("items") -else: - def iterkeys(d, **kw): - return d.iterkeys(**kw) - - def itervalues(d, **kw): - return d.itervalues(**kw) - - def iteritems(d, **kw): - return d.iteritems(**kw) - - def iterlists(d, **kw): - return d.iterlists(**kw) - - viewkeys = operator.methodcaller("viewkeys") - - viewvalues = operator.methodcaller("viewvalues") - - viewitems = operator.methodcaller("viewitems") - -_add_doc(iterkeys, "Return an iterator over the keys of a dictionary.") -_add_doc(itervalues, "Return an iterator over the values of a dictionary.") -_add_doc(iteritems, - "Return an iterator over 
the (key, value) pairs of a dictionary.") -_add_doc(iterlists, - "Return an iterator over the (key, [values]) pairs of a dictionary.") - - -if PY3: - def b(s): - return s.encode("latin-1") - - def u(s): - return s - unichr = chr - import struct - int2byte = struct.Struct(">B").pack - del struct - byte2int = operator.itemgetter(0) - indexbytes = operator.getitem - iterbytes = iter - import io - StringIO = io.StringIO - BytesIO = io.BytesIO - del io - _assertCountEqual = "assertCountEqual" - if sys.version_info[1] <= 1: - _assertRaisesRegex = "assertRaisesRegexp" - _assertRegex = "assertRegexpMatches" - _assertNotRegex = "assertNotRegexpMatches" - else: - _assertRaisesRegex = "assertRaisesRegex" - _assertRegex = "assertRegex" - _assertNotRegex = "assertNotRegex" -else: - def b(s): - return s - # Workaround for standalone backslash - - def u(s): - return unicode(s.replace(r'\\', r'\\\\'), "unicode_escape") - unichr = unichr - int2byte = chr - - def byte2int(bs): - return ord(bs[0]) - - def indexbytes(buf, i): - return ord(buf[i]) - iterbytes = functools.partial(itertools.imap, ord) - import StringIO - StringIO = BytesIO = StringIO.StringIO - _assertCountEqual = "assertItemsEqual" - _assertRaisesRegex = "assertRaisesRegexp" - _assertRegex = "assertRegexpMatches" - _assertNotRegex = "assertNotRegexpMatches" -_add_doc(b, """Byte literal""") -_add_doc(u, """Text literal""") - - -def assertCountEqual(self, *args, **kwargs): - return getattr(self, _assertCountEqual)(*args, **kwargs) - - -def assertRaisesRegex(self, *args, **kwargs): - return getattr(self, _assertRaisesRegex)(*args, **kwargs) - - -def assertRegex(self, *args, **kwargs): - return getattr(self, _assertRegex)(*args, **kwargs) - - -def assertNotRegex(self, *args, **kwargs): - return getattr(self, _assertNotRegex)(*args, **kwargs) - - -if PY3: - exec_ = getattr(moves.builtins, "exec") - - def reraise(tp, value, tb=None): - try: - if value is None: - value = tp() - if value.__traceback__ is not tb: - raise value.with_traceback(tb) - raise value - finally: - value = None - tb = None - -else: - def exec_(_code_, _globs_=None, _locs_=None): - """Execute code in a namespace.""" - if _globs_ is None: - frame = sys._getframe(1) - _globs_ = frame.f_globals - if _locs_ is None: - _locs_ = frame.f_locals - del frame - elif _locs_ is None: - _locs_ = _globs_ - exec("""exec _code_ in _globs_, _locs_""") - - exec_("""def reraise(tp, value, tb=None): - try: - raise tp, value, tb - finally: - tb = None -""") - - -if sys.version_info[:2] > (3,): - exec_("""def raise_from(value, from_value): - try: - raise value from from_value - finally: - value = None -""") -else: - def raise_from(value, from_value): - raise value - - -print_ = getattr(moves.builtins, "print", None) -if print_ is None: - def print_(*args, **kwargs): - """The new-style print function for Python 2.4 and 2.5.""" - fp = kwargs.pop("file", sys.stdout) - if fp is None: - return - - def write(data): - if not isinstance(data, basestring): - data = str(data) - # If the file has an encoding, encode unicode with it. 
- if (isinstance(fp, file) and - isinstance(data, unicode) and - fp.encoding is not None): - errors = getattr(fp, "errors", None) - if errors is None: - errors = "strict" - data = data.encode(fp.encoding, errors) - fp.write(data) - want_unicode = False - sep = kwargs.pop("sep", None) - if sep is not None: - if isinstance(sep, unicode): - want_unicode = True - elif not isinstance(sep, str): - raise TypeError("sep must be None or a string") - end = kwargs.pop("end", None) - if end is not None: - if isinstance(end, unicode): - want_unicode = True - elif not isinstance(end, str): - raise TypeError("end must be None or a string") - if kwargs: - raise TypeError("invalid keyword arguments to print()") - if not want_unicode: - for arg in args: - if isinstance(arg, unicode): - want_unicode = True - break - if want_unicode: - newline = unicode("\n") - space = unicode(" ") - else: - newline = "\n" - space = " " - if sep is None: - sep = space - if end is None: - end = newline - for i, arg in enumerate(args): - if i: - write(sep) - write(arg) - write(end) -if sys.version_info[:2] < (3, 3): - _print = print_ - - def print_(*args, **kwargs): - fp = kwargs.get("file", sys.stdout) - flush = kwargs.pop("flush", False) - _print(*args, **kwargs) - if flush and fp is not None: - fp.flush() - -_add_doc(reraise, """Reraise an exception.""") - -if sys.version_info[0:2] < (3, 4): - # This does exactly the same what the :func:`py3:functools.update_wrapper` - # function does on Python versions after 3.2. It sets the ``__wrapped__`` - # attribute on ``wrapper`` object and it doesn't raise an error if any of - # the attributes mentioned in ``assigned`` and ``updated`` are missing on - # ``wrapped`` object. - def _update_wrapper(wrapper, wrapped, - assigned=functools.WRAPPER_ASSIGNMENTS, - updated=functools.WRAPPER_UPDATES): - for attr in assigned: - try: - value = getattr(wrapped, attr) - except AttributeError: - continue - else: - setattr(wrapper, attr, value) - for attr in updated: - getattr(wrapper, attr).update(getattr(wrapped, attr, {})) - wrapper.__wrapped__ = wrapped - return wrapper - _update_wrapper.__doc__ = functools.update_wrapper.__doc__ - - def wraps(wrapped, assigned=functools.WRAPPER_ASSIGNMENTS, - updated=functools.WRAPPER_UPDATES): - return functools.partial(_update_wrapper, wrapped=wrapped, - assigned=assigned, updated=updated) - wraps.__doc__ = functools.wraps.__doc__ - -else: - wraps = functools.wraps - - -def with_metaclass(meta, *bases): - """Create a base class with a metaclass.""" - # This requires a bit of explanation: the basic idea is to make a dummy - # metaclass for one level of class instantiation that replaces itself with - # the actual metaclass. - class metaclass(type): - - def __new__(cls, name, this_bases, d): - if sys.version_info[:2] >= (3, 7): - # This version introduced PEP 560 that requires a bit - # of extra care (we mimic what is done by __build_class__). 
- resolved_bases = types.resolve_bases(bases) - if resolved_bases is not bases: - d['__orig_bases__'] = bases - else: - resolved_bases = bases - return meta(name, resolved_bases, d) - - @classmethod - def __prepare__(cls, name, this_bases): - return meta.__prepare__(name, bases) - return type.__new__(metaclass, 'temporary_class', (), {}) - - -def add_metaclass(metaclass): - """Class decorator for creating a class with a metaclass.""" - def wrapper(cls): - orig_vars = cls.__dict__.copy() - slots = orig_vars.get('__slots__') - if slots is not None: - if isinstance(slots, str): - slots = [slots] - for slots_var in slots: - orig_vars.pop(slots_var) - orig_vars.pop('__dict__', None) - orig_vars.pop('__weakref__', None) - if hasattr(cls, '__qualname__'): - orig_vars['__qualname__'] = cls.__qualname__ - return metaclass(cls.__name__, cls.__bases__, orig_vars) - return wrapper - - -def ensure_binary(s, encoding='utf-8', errors='strict'): - """Coerce **s** to six.binary_type. - - For Python 2: - - `unicode` -> encoded to `str` - - `str` -> `str` - - For Python 3: - - `str` -> encoded to `bytes` - - `bytes` -> `bytes` - """ - if isinstance(s, text_type): - return s.encode(encoding, errors) - elif isinstance(s, binary_type): - return s - else: - raise TypeError("not expecting type '%s'" % type(s)) - - -def ensure_str(s, encoding='utf-8', errors='strict'): - """Coerce *s* to `str`. - - For Python 2: - - `unicode` -> encoded to `str` - - `str` -> `str` - - For Python 3: - - `str` -> `str` - - `bytes` -> decoded to `str` - """ - if not isinstance(s, (text_type, binary_type)): - raise TypeError("not expecting type '%s'" % type(s)) - if PY2 and isinstance(s, text_type): - s = s.encode(encoding, errors) - elif PY3 and isinstance(s, binary_type): - s = s.decode(encoding, errors) - return s - - -def ensure_text(s, encoding='utf-8', errors='strict'): - """Coerce *s* to six.text_type. - - For Python 2: - - `unicode` -> `unicode` - - `str` -> `unicode` - - For Python 3: - - `str` -> `str` - - `bytes` -> decoded to `str` - """ - if isinstance(s, binary_type): - return s.decode(encoding, errors) - elif isinstance(s, text_type): - return s - else: - raise TypeError("not expecting type '%s'" % type(s)) - - -def python_2_unicode_compatible(klass): - """ - A class decorator that defines __unicode__ and __str__ methods under Python 2. - Under Python 3 it does nothing. - - To support Python 2 and 3 with a single code base, define a __str__ method - returning text and apply this decorator to the class. - """ - if PY2: - if '__str__' not in klass.__dict__: - raise ValueError("@python_2_unicode_compatible cannot be applied " - "to %s because it doesn't define __str__()." % - klass.__name__) - klass.__unicode__ = klass.__str__ - klass.__str__ = lambda self: self.__unicode__().encode('utf-8') - return klass - - -# Complete the moves implementation. -# This code is at the end of this module to speed up module loading. -# Turn this module into a package. -__path__ = [] # required for PEP 302 and PEP 451 -__package__ = __name__ # see PEP 366 @ReservedAssignment -if globals().get("__spec__") is not None: - __spec__.submodule_search_locations = [] # PEP 451 @UndefinedVariable -# Remove other six meta path importers, since they cause problems. This can -# happen if six is removed from sys.modules and then reloaded. (Setuptools does -# this for some reason.) -if sys.meta_path: - for i, importer in enumerate(sys.meta_path): - # Here's some real nastiness: Another "instance" of the six module might - # be floating around. 
Therefore, we can't use isinstance() to check for - # the six meta path importer, since the other six instance will have - # inserted an importer with different class. - if (type(importer).__name__ == "_SixMetaPathImporter" and - importer.name == __name__): - del sys.meta_path[i] - break - del i, importer -# Finally, add the importer to the meta path import hook. -sys.meta_path.append(_importer) - -import warnings - -def deprecated(message): - def deprecated_decorator(func): - def deprecated_func(*args, **kwargs): - warnings.warn("{} is a deprecated function. {}".format(func.__name__, message), - category=DeprecationWarning, - stacklevel=2) - warnings.simplefilter('default', DeprecationWarning) - return func(*args, **kwargs) - return deprecated_func - return deprecated_decorator \ No newline at end of file
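The bulk of this section simply deletes ``splunklib/six.py``, the bundled Python 2/3 compatibility shim. As the ``data.py`` and ``results.py`` hunks above already show (``six.iteritems`` becomes ``dict.items``, ``six.string_types`` becomes ``str``), its helpers reduce to plain Python 3 spellings. A short reference sketch, with illustrative values only::

    from urllib import parse  # stdlib home of the six.moves.urllib targets deleted above

    d = {"a": 1, "b": 2}

    for key, value in d.items():        # was: six.iteritems(d)
        pass

    is_text = isinstance("value", str)  # was: isinstance(..., six.string_types)

    encoded = parse.quote("index=_internal | head 1")  # was: six.moves.urllib.parse.quote

    s = b"bytes from splunkd".decode("utf-8")  # was: six.ensure_str(...)
    b = "text for the wire".encode("utf-8")    # was: six.ensure_binary(...)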