diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index f69da87..f56b7a1 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -40,7 +40,7 @@ jobs: run: | make test - - name: Lint with mypy and flake8. + - name: Lint with mypy and ruff. run: | make lint diff --git a/Makefile b/Makefile index 4700f7d..35b4ead 100644 --- a/Makefile +++ b/Makefile @@ -24,7 +24,7 @@ coverage: venv source ${VENV}/bin/activate && pytest --cov=disruptive tests/ lint: venv - source ${VENV}/bin/activate && mypy --config-file ./mypy.ini disruptive/ && flake8 disruptive/ + source ${VENV}/bin/activate && mypy --config-file ./mypy.ini disruptive/ && ruff check . clean: rm -rf build/ dist/ pip-wheel-metadata/ *.egg-info .pytest_cache/ .mypy_cache/ $(VENV) coverage.xml diff --git a/README.md b/README.md index 9bbeb68..3b09f88 100644 --- a/README.md +++ b/README.md @@ -126,7 +126,7 @@ Run unit-tests against the currently active python version: make test ``` -Lint the package code using MyPy and flake8: +Lint the package code using MyPy and ruff: ``` make lint ``` diff --git a/disruptive/__init__.py b/disruptive/__init__.py index a652614..d0834f6 100644 --- a/disruptive/__init__.py +++ b/disruptive/__init__.py @@ -1,13 +1,13 @@ # Metadata -__version__ = '1.7.3' +__version__ = "1.7.3" # If set, logs of chosen level and higher are printed to console. # Default value None results in no logs at any level. log_level = None # REST API base URLs of which all endpoints are an expansion. -base_url = 'https://api.disruptive-technologies.com/v2' -emulator_base_url = 'https://emulator.disruptive-technologies.com/v2' +base_url = "https://api.disruptive-technologies.com/v2" +emulator_base_url = "https://emulator.disruptive-technologies.com/v2" # If a request response contains an error for which a series of retries is # worth considering, these variable determine how long to wait without an diff --git a/disruptive/authentication.py b/disruptive/authentication.py index 735dde2..483d7f9 100644 --- a/disruptive/authentication.py +++ b/disruptive/authentication.py @@ -17,25 +17,30 @@ def base64url_encode(data: bytes) -> str: def base64url_decode(data: str) -> bytes: - padding = '=' * (4 - (len(data) % 4)) + padding = "=" * (4 - (len(data) % 4)) return base64.urlsafe_b64decode(data + padding) -def create_jwt(payload: dict, - secret: str, - algorithm: str, - headers: dict, - ) -> str: +def create_jwt( + payload: dict, + secret: str, + algorithm: str, + headers: dict, +) -> str: headers["typ"] = "JWT" - header_encoded = base64url_encode(data=json.dumps( - obj=headers, - separators=(',', ':'), - ).encode("utf-8")) - payload_encoded = base64url_encode(json.dumps( - obj=payload, - separators=(',', ':'), - ).encode("utf-8")) + header_encoded = base64url_encode( + data=json.dumps( + obj=headers, + separators=(",", ":"), + ).encode("utf-8") + ) + payload_encoded = base64url_encode( + json.dumps( + obj=payload, + separators=(",", ":"), + ).encode("utf-8") + ) message = f"{header_encoded}.{payload_encoded}" @@ -53,11 +58,10 @@ def create_jwt(payload: dict, class _AuthRoutineBase(object): - def __init__(self) -> None: # Set default attributes. self._expiration: int = 0 - self._token: str = '' + self._token: str = "" def _has_expired(self) -> bool: """ @@ -106,7 +110,6 @@ def refresh(self) -> None: class Unauthenticated(_AuthRoutineBase): - def __init__(self) -> None: # Inherit parent class methods and attributes. 
super().__init__() @@ -125,21 +128,23 @@ def refresh(self) -> None: """ - msg = 'Missing Service Account credentials.\n\n' \ - 'Either set the following environment variables:\n\n' \ - ' DT_SERVICE_ACCOUNT_KEY_ID: Unique Service Account key ID.\n' \ - ' DT_SERVICE_ACCOUNT_SECRET: Unique Service Account secret.\n' \ - ' DT_SERVICE_ACCOUNT_EMAIL: Unique Service Account email.\n\n' \ - 'or provide them programmatically:\n\n' \ - ' import disruptive as dt\n\n' \ - ' dt.default_auth = dt.Auth.service_account(\n' \ - ' key_id="",\n' \ - ' secret="",\n' \ - ' email="",\n' \ - ' )\n\n' \ - 'See https://developer.d21s.com/api/' \ - 'libraries/python/client/authentication.html' \ - ' for more details.\n' + msg = ( + "Missing Service Account credentials.\n\n" + "Either set the following environment variables:\n\n" + " DT_SERVICE_ACCOUNT_KEY_ID: Unique Service Account key ID.\n" + " DT_SERVICE_ACCOUNT_SECRET: Unique Service Account secret.\n" + " DT_SERVICE_ACCOUNT_EMAIL: Unique Service Account email.\n\n" + "or provide them programmatically:\n\n" + " import disruptive as dt\n\n" + " dt.default_auth = dt.Auth.service_account(\n" + ' key_id="",\n' + ' secret="",\n' + ' email="",\n' + " )\n\n" + "See https://developer.d21s.com/api/" + "libraries/python/client/authentication.html" + " for more details.\n" + ) raise dterrors.Unauthorized(msg) @@ -155,9 +160,10 @@ class ServiceAccountAuth(_AuthRoutineBase): """ - supported_algorithms = ['HS256'] - token_endpoint = 'https://identity.'\ - 'disruptive-technologies.com/oauth2/token' + supported_algorithms = ["HS256"] + token_endpoint = ( + "https://identity.disruptive-technologies.com/oauth2/token" + ) def __init__(self, key_id: str, secret: str, email: str): # Inherit parent class methods and attributes. @@ -184,7 +190,7 @@ def email(self) -> str: return self._email def __repr__(self) -> str: - return '{}.{}({}, {}, {})'.format( + return "{}.{}({}, {}, {})".format( self.__class__.__module__, self.__class__.__name__, repr(self.key_id), @@ -202,25 +208,25 @@ def algorithm(self, algorithm: str) -> None: self._algorithm = algorithm else: raise dterrors.ConfigurationError( - f'unsupported algorithm {algorithm}' + f"unsupported algorithm {algorithm}" ) @classmethod def from_credentials_file(cls, credentials: dict) -> ServiceAccountAuth: - for key in ['keyId', 'secret', 'email', 'algorithm', 'tokenEndpoint']: - if key not in credentials['serviceAccount']: + for key in ["keyId", "secret", "email", "algorithm", "tokenEndpoint"]: + if key not in credentials["serviceAccount"]: raise dterrors.ConfigurationError( f'Invalid credentials file. Missing field "{key}".' ) - cfg = credentials['serviceAccount'] + cfg = credentials["serviceAccount"] auth_obj = cls( - key_id=cfg['keyId'], - secret=cfg['secret'], - email=cfg['email'], + key_id=cfg["keyId"], + secret=cfg["secret"], + email=cfg["email"], ) - auth_obj.algorithm = cfg['algorithm'] - auth_obj.token_endpoint = cfg['tokenEndpoint'] + auth_obj.algorithm = cfg["algorithm"] + auth_obj.token_endpoint = cfg["tokenEndpoint"] return auth_obj @@ -234,8 +240,8 @@ def refresh(self) -> None: """ response: dict = self._get_access_token() - self._expiration = time.time() + response['expires_in'] - self._token = 'Bearer {}'.format(response['access_token']) + self._expiration = time.time() + response["expires_in"] + self._token = "Bearer {}".format(response["access_token"]) def _get_access_token(self) -> dict: """ @@ -255,16 +261,16 @@ def _get_access_token(self) -> dict: # Construct the JWT header. 
jwt_headers: dict[str, str] = { - 'alg': self.algorithm, - 'kid': self.key_id, + "alg": self.algorithm, + "kid": self.key_id, } # Construct the JWT payload. jwt_payload: dict[str, Any] = { - 'iat': int(time.time()), # current unixtime - 'exp': int(time.time()) + 3600, # expiration unixtime - 'aud': self.token_endpoint, - 'iss': self.email, + "iat": int(time.time()), # current unixtime + "exp": int(time.time()) + 3600, # expiration unixtime + "aud": self.token_endpoint, + "iss": self.email, } # Sign and encode JWT with the secret. @@ -277,26 +283,28 @@ def _get_access_token(self) -> dict: # Prepare HTTP POST request data. # Note: The requests package applies Form URL-Encoding by default. - request_data: str = urllib.parse.urlencode({ - 'assertion': encoded_jwt, - 'grant_type': 'urn:ietf:params:oauth:grant-type:jwt-bearer' - }) + request_data: str = urllib.parse.urlencode( + { + "assertion": encoded_jwt, + "grant_type": "urn:ietf:params:oauth:grant-type:jwt-bearer", + } + ) # Exchange the JWT for an access token. try: access_token_response: dict = dtrequests.DTRequest.post( - url='', + url="", base_url=self.token_endpoint, data=request_data, - headers={'Content-Type': 'application/x-www-form-urlencoded'}, + headers={"Content-Type": "application/x-www-form-urlencoded"}, skip_auth=True, ) except dterrors.BadRequest: # Re-raise exception with more specific information. raise dterrors.Unauthorized( - 'Could not authenticate with the provided credentials.\n\n' - 'Read more: https://developer.d21s.com/docs/authentication' - '/oauth2#common-errors' + "Could not authenticate with the provided credentials.\n\n" + "Read more: https://developer.d21s.com/docs/authentication" + "/oauth2#common-errors" ) # Return the access token in the request. @@ -304,35 +312,37 @@ def _get_access_token(self) -> dict: def _service_account_env_vars() -> Unauthenticated | ServiceAccountAuth: - key_id = os.getenv('DT_SERVICE_ACCOUNT_KEY_ID', '') - secret = os.getenv('DT_SERVICE_ACCOUNT_SECRET', '') - email = os.getenv('DT_SERVICE_ACCOUNT_EMAIL', '') + key_id = os.getenv("DT_SERVICE_ACCOUNT_KEY_ID", "") + secret = os.getenv("DT_SERVICE_ACCOUNT_SECRET", "") + email = os.getenv("DT_SERVICE_ACCOUNT_EMAIL", "") - if '' in [key_id, secret, email]: + if "" in [key_id, secret, email]: return Unauthenticated() else: return Auth.service_account(key_id, secret, email) def _credentials_file() -> Unauthenticated | ServiceAccountAuth: - file_path = os.getenv('DT_CREDENTIALS_FILE') + file_path = os.getenv("DT_CREDENTIALS_FILE") if file_path is not None: if not os.path.exists(file_path): - msg = 'Missing credentials file.\n\n' \ - 'Environment variable DT_CREDENTIALS_FILE is set, but' \ - ' no file found at target path.\n' \ - f'{file_path}' + msg = ( + "Missing credentials file.\n\n" + "Environment variable DT_CREDENTIALS_FILE is set, but" + " no file found at target path.\n" + f"{file_path}" + ) raise FileNotFoundError(msg) - with open(file_path, 'r') as f: + with open(file_path, "r") as f: credentials = json.load(f) - if 'serviceAccount' in credentials: + if "serviceAccount" in credentials: return ServiceAccountAuth.from_credentials_file(credentials) return Unauthenticated() -class Auth(): +class Auth: """ Authenticates the API using a factory design pattern. The Auth class itself is only for namespacing purposes. 
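The authentication hunks above are formatting-only; the credential flow itself is unchanged. For reference, a minimal sketch of the two configuration paths it supports, assuming the `disruptive` package is installed and using placeholder credentials:

```python
import disruptive as dt

# Option 1: let the package pick up credentials from the environment:
#   DT_SERVICE_ACCOUNT_KEY_ID, DT_SERVICE_ACCOUNT_SECRET,
#   DT_SERVICE_ACCOUNT_EMAIL (or DT_CREDENTIALS_FILE pointing at a
#   Service Account credentials JSON file with a "serviceAccount" entry).

# Option 2: configure the Service Account programmatically.
# The values below are placeholders, not real credentials.
dt.default_auth = dt.Auth.service_account(
    key_id="<key-id>",
    secret="<secret>",
    email="<service-account-email>",
)
```

No request is made at construction time; `refresh()` later builds the JWT and exchanges it at `token_endpoint` for an access token when one is needed.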
@@ -360,11 +370,12 @@ def unauthenticated() -> Unauthenticated: return Unauthenticated() @classmethod - def service_account(cls, - key_id: str, - secret: str, - email: str, - ) -> ServiceAccountAuth: + def service_account( + cls, + key_id: str, + secret: str, + email: str, + ) -> ServiceAccountAuth: """ This method uses an OAuth2 authentication flow. With the provided credentials, a `JWT `_ is created and exchanged for @@ -396,11 +407,13 @@ def service_account(cls, """ # Check that credentials are populated strings. - cls._verify_str_credentials({ - 'key_id': key_id, - 'secret': secret, - 'email': email, - }) + cls._verify_str_credentials( + { + "key_id": key_id, + "secret": secret, + "email": email, + } + ) return ServiceAccountAuth(key_id, secret, email) @@ -428,16 +441,16 @@ def _verify_str_credentials(credentials: dict) -> None: # the environment with a fallback to an empty string. if len(credentials[key]) == 0: raise dterrors.ConfigurationError( - 'Authentication credential <{}> is' - ' empty string.'.format(key) + "Authentication credential <{}> is" + " empty string.".format(key) ) # If not, raise TypeError. else: raise dterrors._raise_builtin( TypeError, - 'Authentication credential <{}> got type <{}>. ' - 'Expected .'.format( + "Authentication credential <{}> got type <{}>. " + "Expected .".format( key, type(credentials[key]).__name__ - ) + ), ) diff --git a/disruptive/errors.py b/disruptive/errors.py index 5516608..0fbd858 100644 --- a/disruptive/errors.py +++ b/disruptive/errors.py @@ -62,10 +62,10 @@ def __init__(self, error: dict) -> None: super().__init__(error) # Unpack error dictionary. - self.device_id: str = error['device'].split('/')[-1] - self.project_id: str = error['device'].split('/')[1] - self.status_code: str = error['status']['code'] - self.message: str = error['status']['message'] + self.device_id: str = error["device"].split("/")[-1] + self.project_id: str = error["device"].split("/")[1] + self.status_code: str = error["status"]["code"] + self.message: str = error["status"]["message"] class LabelUpdateError(BatchError): @@ -92,10 +92,10 @@ def __init__(self, error: dict) -> None: super().__init__(error) # Unpack error dictionary. - self.device_id: str = error['device'].split('/')[-1] - self.project_id: str = error['device'].split('/')[1] - self.status_code: str = error['status']['code'] - self.message: str = error['status']['message'] + self.device_id: str = error["device"].split("/")[-1] + self.project_id: str = error["device"].split("/")[1] + self.status_code: str = error["status"]["code"] + self.message: str = error["status"]["message"] # ------------------------- ServerError ------------------------- @@ -280,9 +280,9 @@ def __init__(self, error: dict) -> None: # Inherit from ClaimError parent. super().__init__(error) - self.device_id: str = error['deviceId'] - self.code: str = error['code'] - self.message: str = error['message'] + self.device_id: str = error["deviceId"] + self.code: str = error["code"] + self.message: str = error["message"] class ClaimErrorKitNotFound(ClaimError): @@ -295,9 +295,9 @@ def __init__(self, error: dict) -> None: # Inherit from ClaimError parent. super().__init__(error) - self.kit_id: str = error['kitId'] - self.code: str = error['code'] - self.message: str = error['message'] + self.kit_id: str = error["kitId"] + self.code: str = error["code"] + self.message: str = error["message"] class ClaimErrorDeviceNotFound(ClaimError): @@ -310,16 +310,17 @@ def __init__(self, error: dict) -> None: # Inherit from ClaimError parent. 
super().__init__(error) - self.device_id: str = error['deviceId'] - self.code: str = error['code'] - self.message: str = error['message'] + self.device_id: str = error["deviceId"] + self.code: str = error["code"] + self.message: str = error["message"] # ------------------------- error handling ------------------------- -def parse_request_error(caught_error: Exception, - data: dict, - nth_attempt: int, - ) -> tuple: +def parse_request_error( + caught_error: Exception, + data: dict, + nth_attempt: int, +) -> tuple: """ Depending on the request error caught, choose a course of action. @@ -346,7 +347,7 @@ def parse_request_error(caught_error: Exception, # Read Timeouts should be attempted again. if isinstance(caught_error, requests.exceptions.ReadTimeout): return ( - ReadTimeout('Connection timed out.'), + ReadTimeout("Connection timed out."), True, nth_attempt**2, ) @@ -354,7 +355,7 @@ def parse_request_error(caught_error: Exception, # Connection errors should be attempted again. elif isinstance(caught_error, requests.exceptions.ConnectionError): return ( - ConnectionError('Failed to establish connection.'), + ConnectionError("Failed to establish connection."), True, nth_attempt**2, ) @@ -363,11 +364,12 @@ def parse_request_error(caught_error: Exception, return caught_error, False, None -def parse_api_status_code(status_code: Optional[int], - data: dict, - headers: Any, - nth_attempt: int, - ) -> Any: +def parse_api_status_code( + status_code: Optional[int], + data: dict, + headers: Any, + nth_attempt: int, +) -> Any: """ Depending on the status code, returns an exception, retry boolean and, incemented retry attempt. @@ -417,11 +419,11 @@ def parse_api_status_code(status_code: Optional[int], elif status_code == 409: return Conflict(data), False, None elif status_code == 429: - if 'Retry-After' in headers: + if "Retry-After" in headers: return ( TooManyRequests(data), True, - int(headers['Retry-After']), + int(headers["Retry-After"]), ) else: return TooManyRequests(data), False, None diff --git a/disruptive/events/__init__.py b/disruptive/events/__init__.py index 615ca4b..2c8ded9 100644 --- a/disruptive/events/__init__.py +++ b/disruptive/events/__init__.py @@ -6,15 +6,17 @@ from disruptive.events.events import Humidity as Humidity # noqa from disruptive.events.events import HumiditySample as HumiditySample # noqa from disruptive.events.events import ObjectPresentCount as ObjectPresentCount # noqa -from disruptive.events.events import TouchCount as TouchCount # noqa -from disruptive.events.events import WaterPresent as WaterPresent # noqa +from disruptive.events.events import TouchCount as TouchCount # noqa +from disruptive.events.events import WaterPresent as WaterPresent # noqa from disruptive.events.events import NetworkStatus as NetworkStatus # noqa -from disruptive.events.events import NetworkStatusCloudConnector as NetworkStatusCloudConnector # noqa +from disruptive.events.events import ( + NetworkStatusCloudConnector as NetworkStatusCloudConnector, +) # noqa from disruptive.events.events import BatteryStatus as BatteryStatus # noqa -from disruptive.events.events import LabelsChanged as LabelsChanged # noqa -from disruptive.events.events import ConnectionStatus as ConnectionStatus # noqa +from disruptive.events.events import LabelsChanged as LabelsChanged # noqa +from disruptive.events.events import ConnectionStatus as ConnectionStatus # noqa from disruptive.events.events import EthernetStatus as EthernetStatus # noqa -from disruptive.events.events import CellularStatus as CellularStatus 
# noqa +from disruptive.events.events import CellularStatus as CellularStatus # noqa from disruptive.events.events import Co2 as Co2 # noqa from disruptive.events.events import Pressure as Pressure # noqa from disruptive.events.events import Motion as Motion # noqa @@ -27,7 +29,9 @@ from disruptive.events.events import TEMPERATURE as TEMPERATURE # noqa from disruptive.events.events import OBJECT_PRESENT as OBJECT_PRESENT # noqa from disruptive.events.events import HUMIDITY as HUMIDITY # noqa -from disruptive.events.events import OBJECT_PRESENT_COUNT as OBJECT_PRESENT_COUNT # noqa +from disruptive.events.events import ( + OBJECT_PRESENT_COUNT as OBJECT_PRESENT_COUNT, +) # noqa from disruptive.events.events import TOUCH_COUNT as TOUCH_COUNT # noqa from disruptive.events.events import WATER_PRESENT as WATER_PRESENT # noqa from disruptive.events.events import NETWORK_STATUS as NETWORK_STATUS # noqa @@ -35,8 +39,8 @@ from disruptive.events.events import LABELS_CHANGED as LABELS_CHANGED # noqa from disruptive.events.events import CONNECTION_STATUS as CONNECTION_STATUS # noqa from disruptive.events.events import ETHERNET_STATUS as ETHERNET_STATUS # noqa -from disruptive.events.events import CELLULAR_STATUS as CELLULAR_STATUS # noqa -from disruptive.events.events import CO2 as CO2 # noqa +from disruptive.events.events import CELLULAR_STATUS as CELLULAR_STATUS # noqa +from disruptive.events.events import CO2 as CO2 # noqa from disruptive.events.events import PRESSURE as PRESSURE # noqa from disruptive.events.events import MOTION as MOTION # noqa from disruptive.events.events import DESK_OCCUPANCY as DESK_OCCUPANCY # noqa diff --git a/disruptive/events/events.py b/disruptive/events/events.py index 23f4b89..e250953 100644 --- a/disruptive/events/events.py +++ b/disruptive/events/events.py @@ -8,30 +8,45 @@ import disruptive.outputs as dtoutputs import disruptive.transforms as dttrans -TOUCH: str = 'touch' -TEMPERATURE: str = 'temperature' -OBJECT_PRESENT: str = 'objectPresent' -HUMIDITY: str = 'humidity' -OBJECT_PRESENT_COUNT: str = 'objectPresentCount' -TOUCH_COUNT: str = 'touchCount' -WATER_PRESENT: str = 'waterPresent' -NETWORK_STATUS: str = 'networkStatus' -BATTERY_STATUS: str = 'batteryStatus' -LABELS_CHANGED: str = 'labelsChanged' -CONNECTION_STATUS: str = 'connectionStatus' -ETHERNET_STATUS: str = 'ethernetStatus' -CELLULAR_STATUS: str = 'cellularStatus' -CO2: str = 'co2' -PRESSURE: str = 'pressure' -MOTION: str = 'motion' -DESK_OCCUPANCY: str = 'deskOccupancy' -CONTACT: str = 'contact' -PROBE_WIRE_STATUS: str = 'probeWireStatus' +TOUCH: str = "touch" +TEMPERATURE: str = "temperature" +OBJECT_PRESENT: str = "objectPresent" +HUMIDITY: str = "humidity" +OBJECT_PRESENT_COUNT: str = "objectPresentCount" +TOUCH_COUNT: str = "touchCount" +WATER_PRESENT: str = "waterPresent" +NETWORK_STATUS: str = "networkStatus" +BATTERY_STATUS: str = "batteryStatus" +LABELS_CHANGED: str = "labelsChanged" +CONNECTION_STATUS: str = "connectionStatus" +ETHERNET_STATUS: str = "ethernetStatus" +CELLULAR_STATUS: str = "cellularStatus" +CO2: str = "co2" +PRESSURE: str = "pressure" +MOTION: str = "motion" +DESK_OCCUPANCY: str = "deskOccupancy" +CONTACT: str = "contact" +PROBE_WIRE_STATUS: str = "probeWireStatus" EVENT_TYPES = [ - TOUCH, TEMPERATURE, OBJECT_PRESENT, HUMIDITY, OBJECT_PRESENT_COUNT, - TOUCH_COUNT, WATER_PRESENT, NETWORK_STATUS, BATTERY_STATUS, LABELS_CHANGED, - CONNECTION_STATUS, ETHERNET_STATUS, CELLULAR_STATUS, CO2, PRESSURE, MOTION, - DESK_OCCUPANCY, CONTACT, PROBE_WIRE_STATUS, + TOUCH, + TEMPERATURE, + 
OBJECT_PRESENT, + HUMIDITY, + OBJECT_PRESENT_COUNT, + TOUCH_COUNT, + WATER_PRESENT, + NETWORK_STATUS, + BATTERY_STATUS, + LABELS_CHANGED, + CONNECTION_STATUS, + ETHERNET_STATUS, + CELLULAR_STATUS, + CO2, + PRESSURE, + MOTION, + DESK_OCCUPANCY, + CONTACT, + PROBE_WIRE_STATUS, ] @@ -65,17 +80,17 @@ def __init__(self, data: dict, event_type: str) -> None: self.timestamp: Optional[datetime | str] = None # If timestamp is provided, verify type and set attribute. - if 'updateTime' in data: + if "updateTime" in data: # Raw should be iso8601 str format, while # attribute should be type datetime. - ts_iso8601 = dttrans.to_iso8601(data['updateTime']) - ts_datetime = dttrans.to_datetime(data['updateTime']) + ts_iso8601 = dttrans.to_iso8601(data["updateTime"]) + ts_datetime = dttrans.to_datetime(data["updateTime"]) # If we can not verify iso8601 format, remove field. if ts_iso8601 is not None: - data['updateTime'] = ts_iso8601 + data["updateTime"] = ts_iso8601 else: - del data['updateTime'] + del data["updateTime"] # Set datetime return as timestamp attribute. self.timestamp = ts_datetime @@ -90,10 +105,11 @@ def __init__(self, data: dict, event_type: str) -> None: dtoutputs.OutputBase.__init__(self, data) @classmethod - def from_event_type(cls, - data: dict, - event_type: str, - ) -> Optional[_EventType]: + def from_event_type( + cls, + data: dict, + event_type: str, + ) -> Optional[_EventType]: """ Constructs the appropriate child class from the provided event type. @@ -122,8 +138,9 @@ def from_event_type(cls, return child_instance @staticmethod - def __child_map(event_type: str, - ) -> tuple[Optional[_EventType], Optional[bool]]: + def __child_map( + event_type: str, + ) -> tuple[Optional[_EventType], Optional[bool]]: """ Based on provided event type, returns the child class and supporting information. @@ -140,13 +157,13 @@ def __child_map(event_type: str, out = ( getattr( disruptive.events, - _EVENTS_MAP._api_names[event_type].class_name + _EVENTS_MAP._api_names[event_type].class_name, ), _EVENTS_MAP._api_names[event_type].is_keyed, ) return out - dtlog.warning('Skipping unknown event type {}.'.format(event_type)) + dtlog.warning("Skipping unknown event type {}.".format(event_type)) return None, None @@ -177,12 +194,10 @@ def __init__(self, timestamp: Optional[datetime | str] = None): self.timestamp: Optional[datetime | str] = timestamp # Inherit parent _EventData class init with repacked data dictionary. - _EventData.__init__(self, self.__repack(), 'touch') + _EventData.__init__(self, self.__repack(), "touch") def __repr__(self) -> str: - string = '{}.{}('\ - 'timestamp={}'\ - ')' + string = "{}.{}(timestamp={})" return string.format( self.__class__.__module__, self.__class__.__name__, @@ -208,7 +223,7 @@ def _from_raw(cls, data: dict) -> Touch: # Construct the object with unpacked parameters. obj = cls( - timestamp=data['updateTime'], + timestamp=data["updateTime"], ) # Re-inherit from parent, but now providing response data. 
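The event hunks above likewise only touch quoting and wrapping. As the `_EventData` constructor shows, any `updateTime` field is normalized: the raw dictionary keeps an ISO-8601 string while the `timestamp` attribute is set to a `datetime`. A minimal sketch of that behaviour from the outside, assuming the package is installed (the value is a placeholder):

```python
from datetime import datetime, timezone

from disruptive.events.events import Touch

# Timestamps may be passed as datetime objects or ISO-8601 strings;
# either way the event's `timestamp` attribute ends up as a datetime.
touch = Touch(timestamp=datetime.now(timezone.utc))
print(touch.timestamp)  # datetime.datetime(...)
print(touch)            # disruptive.events.events.Touch(timestamp=...)
```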
@@ -219,7 +234,7 @@ def _from_raw(cls, data: dict) -> Touch: def __repack(self) -> dict: data: dict = dict() if self.timestamp is not None: - data['updateTime'] = self.timestamp + data["updateTime"] = self.timestamp return data @@ -242,12 +257,13 @@ class Temperature(_EventData): """ - def __init__(self, - celsius: float, - samples: Optional[list] = None, - is_backfilled: Optional[bool] = None, - timestamp: Optional[datetime | str] = None, - ) -> None: + def __init__( + self, + celsius: float, + samples: Optional[list] = None, + is_backfilled: Optional[bool] = None, + timestamp: Optional[datetime | str] = None, + ) -> None: """ Constructs the Temperature object. The `fahrenheit` attribute is calculated from the provided `celsius` parameter. @@ -274,15 +290,12 @@ def __init__(self, self.timestamp: Optional[datetime | str] = timestamp # Inherit parent _EventData class init with repacked data dictionary. - _EventData.__init__(self, self.__repack(), 'temperature') + _EventData.__init__(self, self.__repack(), "temperature") def __repr__(self) -> str: - string = '{}.{}('\ - 'celsius={}, '\ - 'samples={}, '\ - 'is_backfilled={}, '\ - 'timestamp={}'\ - ')' + string = ( + "{}.{}(celsius={}, samples={}, is_backfilled={}, timestamp={})" + ) return string.format( self.__class__.__module__, self.__class__.__name__, @@ -311,18 +324,20 @@ def _from_raw(cls, data: dict) -> Temperature: # Convert samples dictionaries to TemperatureSample objects. sample_objs = [] - for sample in data['samples']: - sample_objs.append(TemperatureSample( - celsius=sample['value'], - timestamp=sample['sampleTime'], - )) + for sample in data["samples"]: + sample_objs.append( + TemperatureSample( + celsius=sample["value"], + timestamp=sample["sampleTime"], + ) + ) # Construct the object with unpacked parameters. obj = cls( - celsius=data['value'], + celsius=data["value"], samples=sample_objs, - is_backfilled=data['isBackfilled'], - timestamp=data['updateTime'], + is_backfilled=data["isBackfilled"], + timestamp=data["updateTime"], ) # Re-inherit from parent, but now providing response data. @@ -333,13 +348,13 @@ def _from_raw(cls, data: dict) -> Temperature: def __repack(self) -> dict: data: dict = dict() if self.celsius is not None: - data['value'] = self.celsius + data["value"] = self.celsius if self.samples is not None: - data['samples'] = [s._raw for s in self.samples] + data["samples"] = [s._raw for s in self.samples] if self.is_backfilled is not None: - data['isBackfilled'] = self.is_backfilled + data["isBackfilled"] = self.is_backfilled if self.timestamp is not None: - data['updateTime'] = self.timestamp + data["updateTime"] = self.timestamp return data @@ -358,10 +373,11 @@ class TemperatureSample(dtoutputs.OutputBase): """ - def __init__(self, - celsius: float, - timestamp: datetime | str, - ) -> None: + def __init__( + self, + celsius: float, + timestamp: datetime | str, + ) -> None: """ Constructs the TemperatureSample object. The `fahrenheit` attribute is calculated from the provided `celsius` parameter. @@ -385,10 +401,7 @@ def __init__(self, dtoutputs.OutputBase.__init__(self, self.__repack()) def __repr__(self) -> str: - string = '{}.{}('\ - 'celsius={}, '\ - 'timestamp={}'\ - ')' + string = "{}.{}(celsius={}, timestamp={})" return string.format( self.__class__.__module__, self.__class__.__name__, @@ -400,8 +413,8 @@ def __repr__(self) -> str: def _from_raw(cls, data: dict) -> TemperatureSample: # Construct the object with unpacked parameters. 
obj = cls( - celsius=data['value'], - timestamp=data['sampleTime'], + celsius=data["value"], + timestamp=data["sampleTime"], ) # Inherit parent class. @@ -412,9 +425,9 @@ def _from_raw(cls, data: dict) -> TemperatureSample: def __repack(self) -> dict: data: dict = dict() if self.celsius is not None: - data['value'] = self.celsius + data["value"] = self.celsius if self.timestamp is not None: - data['sampleTime'] = dttrans.to_iso8601(self.timestamp) + data["sampleTime"] = dttrans.to_iso8601(self.timestamp) return data @@ -431,13 +444,14 @@ class ObjectPresent(_EventData): """ - STATE_PRESENT = 'PRESENT' - STATE_NOT_PRESENT = 'NOT_PRESENT' + STATE_PRESENT = "PRESENT" + STATE_NOT_PRESENT = "NOT_PRESENT" - def __init__(self, - state: str, - timestamp: Optional[datetime | str] = None, - ) -> None: + def __init__( + self, + state: str, + timestamp: Optional[datetime | str] = None, + ) -> None: """ Constructs the ObjectPresent object, inheriting parent class and setting the type-specific attributes. @@ -457,13 +471,10 @@ def __init__(self, self.timestamp: Optional[datetime | str] = timestamp # Inherit parent _EventData class init with repacked data dictionary. - _EventData.__init__(self, self.__repack(), 'objectPresent') + _EventData.__init__(self, self.__repack(), "objectPresent") def __repr__(self) -> str: - string = '{}.{}('\ - 'state={}, '\ - 'timestamp={}'\ - ')' + string = "{}.{}(state={}, timestamp={})" return string.format( self.__class__.__module__, self.__class__.__name__, @@ -490,8 +501,8 @@ def _from_raw(cls, data: dict) -> ObjectPresent: # Construct the object with unpacked parameters. obj = cls( - state=data['state'], - timestamp=data['updateTime'], + state=data["state"], + timestamp=data["updateTime"], ) # Re-inherit from parent, but now providing response data. @@ -502,9 +513,9 @@ def _from_raw(cls, data: dict) -> ObjectPresent: def __repack(self) -> dict: data: dict = dict() if self.state is not None: - data['state'] = self.state + data["state"] = self.state if self.timestamp is not None: - data['updateTime'] = self.timestamp + data["updateTime"] = self.timestamp return data @@ -526,13 +537,14 @@ class Humidity(_EventData): """ - def __init__(self, - celsius: float, - relative_humidity: float, - samples: Optional[list] = None, - is_backfilled: Optional[bool] = None, - timestamp: Optional[datetime | str] = None, - ): + def __init__( + self, + celsius: float, + relative_humidity: float, + samples: Optional[list] = None, + is_backfilled: Optional[bool] = None, + timestamp: Optional[datetime | str] = None, + ): """ Constructs the Humidity object. @@ -562,16 +574,18 @@ def __init__(self, self.timestamp: Optional[datetime | str] = timestamp # Inherit parent _EventData class init with repacked data dictionary. - _EventData.__init__(self, self.__repack(), 'humidity') + _EventData.__init__(self, self.__repack(), "humidity") def __repr__(self) -> str: - string = '{}.{}('\ - 'celsius={}, '\ - 'relative_humidity={}, '\ - 'samples={}, '\ - 'is_backfilled={}, '\ - 'timestamp={}'\ - ')' + string = ( + "{}.{}(" + "celsius={}, " + "relative_humidity={}, " + "samples={}, " + "is_backfilled={}, " + "timestamp={}" + ")" + ) return string.format( self.__class__.__module__, self.__class__.__name__, @@ -601,20 +615,22 @@ def _from_raw(cls, data: dict) -> Humidity: # Convert samples dictionaries to HumiditySample objects. 
sample_objs = [] - for sample in data['samples']: - sample_objs.append(HumiditySample( - celsius=sample['temperature'], - relative_humidity=sample['relativeHumidity'], - timestamp=sample['sampleTime'], - )) + for sample in data["samples"]: + sample_objs.append( + HumiditySample( + celsius=sample["temperature"], + relative_humidity=sample["relativeHumidity"], + timestamp=sample["sampleTime"], + ) + ) # Construct the object with unpacked parameters. obj = cls( - celsius=data['temperature'], - relative_humidity=data['relativeHumidity'], + celsius=data["temperature"], + relative_humidity=data["relativeHumidity"], samples=sample_objs, - is_backfilled=data['isBackfilled'], - timestamp=data['updateTime'], + is_backfilled=data["isBackfilled"], + timestamp=data["updateTime"], ) # Re-inherit from parent, but now providing response data. @@ -625,13 +641,13 @@ def _from_raw(cls, data: dict) -> Humidity: def __repack(self) -> dict: data: dict = dict() if self.celsius is not None: - data['temperature'] = self.celsius + data["temperature"] = self.celsius if self.relative_humidity is not None: - data['relativeHumidity'] = self.relative_humidity + data["relativeHumidity"] = self.relative_humidity if self.is_backfilled is not None: - data['isBackfilled'] = self.is_backfilled + data["isBackfilled"] = self.is_backfilled if self.timestamp is not None: - data['updateTime'] = self.timestamp + data["updateTime"] = self.timestamp return data @@ -652,11 +668,12 @@ class HumiditySample(dtoutputs.OutputBase): """ - def __init__(self, - celsius: float, - relative_humidity: float, - timestamp: datetime | str, - ) -> None: + def __init__( + self, + celsius: float, + relative_humidity: float, + timestamp: datetime | str, + ) -> None: """ Constructs the TemperatureSample object. The `fahrenheit` attribute is calculated from the provided `celsius` parameter. @@ -683,11 +700,7 @@ def __init__(self, dtoutputs.OutputBase.__init__(self, self.__repack()) def __repr__(self) -> str: - string = '{}.{}('\ - 'celsius={}, '\ - 'relative_humidity={}, '\ - 'timestamp={}'\ - ')' + string = "{}.{}(celsius={}, relative_humidity={}, timestamp={})" return string.format( self.__class__.__module__, self.__class__.__name__, @@ -700,9 +713,9 @@ def __repr__(self) -> str: def _from_raw(cls, data: dict) -> HumiditySample: # Construct the object with unpacked parameters. obj = cls( - celsius=data['temperature'], - relative_humidity=data['relativeHumidity'], - timestamp=data['sampleTime'], + celsius=data["temperature"], + relative_humidity=data["relativeHumidity"], + timestamp=data["sampleTime"], ) # Inherit parent class. @@ -713,11 +726,11 @@ def _from_raw(cls, data: dict) -> HumiditySample: def __repack(self) -> dict: data: dict = dict() if self.celsius is not None: - data['celsius'] = self.celsius + data["celsius"] = self.celsius if self.relative_humidity is not None: - data['relativeHumidity'] = self.relative_humidity + data["relativeHumidity"] = self.relative_humidity if self.timestamp is not None: - data['sampleTime'] = dttrans.to_iso8601(self.timestamp) + data["sampleTime"] = dttrans.to_iso8601(self.timestamp) return data @@ -735,10 +748,11 @@ class ObjectPresentCount(_EventData): """ - def __init__(self, - total: int, - timestamp: Optional[datetime | str] = None, - ): + def __init__( + self, + total: int, + timestamp: Optional[datetime | str] = None, + ): """ Constructs the ObjectPresentCount object. 
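Humidity's sample handling is also unchanged in behaviour: `_from_raw` still builds one `HumiditySample` per entry in the raw `samples` list. A small sketch constructing the same objects by hand, with made-up readings; in practice these are deserialized from API response data:

```python
from disruptive.events.events import Humidity, HumiditySample

# Made-up readings for illustration only.
humidity = Humidity(
    celsius=22.4,
    relative_humidity=41.0,
    samples=[
        HumiditySample(
            celsius=22.3,
            relative_humidity=40.7,
            timestamp="2023-01-01T12:00:00Z",
        ),
    ],
)
print(humidity.relative_humidity)      # 41.0
print(humidity.samples[0].timestamp)   # 2023-01-01T12:00:00Z
```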
@@ -758,13 +772,10 @@ def __init__(self, self.timestamp: Optional[datetime | str] = timestamp # Inherit parent _EventData class init with repacked data dictionary. - _EventData.__init__(self, self.__repack(), 'objectPresentCount') + _EventData.__init__(self, self.__repack(), "objectPresentCount") def __repr__(self) -> str: - string = '{}.{}('\ - 'total={}, '\ - 'timestamp={}'\ - ')' + string = "{}.{}(total={}, timestamp={})" return string.format( self.__class__.__module__, self.__class__.__name__, @@ -791,8 +802,8 @@ def _from_raw(cls, data: dict) -> ObjectPresentCount: # Construct the object with unpacked parameters. obj = cls( - total=data['total'], - timestamp=data['updateTime'], + total=data["total"], + timestamp=data["updateTime"], ) # Re-inherit from parent, but now providing response data. @@ -802,9 +813,9 @@ def _from_raw(cls, data: dict) -> ObjectPresentCount: def __repack(self) -> dict: data: dict = dict() - data['total'] = self.total + data["total"] = self.total if self.timestamp is not None: - data['updateTime'] = self.timestamp + data["updateTime"] = self.timestamp return data @@ -822,10 +833,11 @@ class TouchCount(_EventData): """ - def __init__(self, - total: int, - timestamp: Optional[datetime | str] = None, - ): + def __init__( + self, + total: int, + timestamp: Optional[datetime | str] = None, + ): """ Constructs the TouchCount object. @@ -845,13 +857,10 @@ def __init__(self, self.timestamp: Optional[datetime | str] = timestamp # Inherit parent _EventData class init with repacked data dictionary. - _EventData.__init__(self, self.__repack(), 'touchCount') + _EventData.__init__(self, self.__repack(), "touchCount") def __repr__(self) -> str: - string = '{}.{}('\ - 'total={}, '\ - 'timestamp={}'\ - ')' + string = "{}.{}(total={}, timestamp={})" return string.format( self.__class__.__module__, self.__class__.__name__, @@ -878,8 +887,8 @@ def _from_raw(cls, data: dict) -> TouchCount: # Construct the object with unpacked parameters. obj = cls( - total=data['total'], - timestamp=data['updateTime'], + total=data["total"], + timestamp=data["updateTime"], ) # Re-inherit from parent, but now providing response data. @@ -889,9 +898,9 @@ def _from_raw(cls, data: dict) -> TouchCount: def __repack(self) -> dict: data: dict = dict() - data['total'] = self.total + data["total"] = self.total if self.timestamp is not None: - data['updateTime'] = self.timestamp + data["updateTime"] = self.timestamp return data @@ -908,13 +917,14 @@ class WaterPresent(_EventData): """ - STATE_PRESENT = 'PRESENT' - STATE_NOT_PRESENT = 'NOT_PRESENT' + STATE_PRESENT = "PRESENT" + STATE_NOT_PRESENT = "NOT_PRESENT" - def __init__(self, - state: str, - timestamp: Optional[datetime | str] = None, - ): + def __init__( + self, + state: str, + timestamp: Optional[datetime | str] = None, + ): """ Constructs the WaterPresent object. @@ -933,13 +943,10 @@ def __init__(self, self.timestamp: Optional[datetime | str] = timestamp # Inherit parent _EventData class init with repacked data dictionary. - _EventData.__init__(self, self.__repack(), 'waterPresent') + _EventData.__init__(self, self.__repack(), "waterPresent") def __repr__(self) -> str: - string = '{}.{}('\ - 'state={}, '\ - 'timestamp={}'\ - ')' + string = "{}.{}(state={}, timestamp={})" return string.format( self.__class__.__module__, self.__class__.__name__, @@ -966,8 +973,8 @@ def _from_raw(cls, data: dict) -> WaterPresent: # Construct the object with unpacked parameters. 
obj = cls( - state=data['state'], - timestamp=data['updateTime'], + state=data["state"], + timestamp=data["updateTime"], ) # Re-inherit from parent, but now providing response data. @@ -977,9 +984,9 @@ def _from_raw(cls, data: dict) -> WaterPresent: def __repack(self) -> dict: data: dict = dict() - data['state'] = self.state + data["state"] = self.state if self.timestamp is not None: - data['updateTime'] = self.timestamp + data["updateTime"] = self.timestamp return data @@ -1000,11 +1007,12 @@ class NetworkStatusCloudConnector(dtoutputs.OutputBase): """ - def __init__(self, - device_id: str, - signal_strength: int, - rssi: int, - ): + def __init__( + self, + device_id: str, + signal_strength: int, + rssi: int, + ): """ Constructs the NetworkStatusCloudConnector object. @@ -1030,11 +1038,7 @@ def __init__(self, self.rssi: int = rssi def __repr__(self) -> str: - string = '{}.{}('\ - 'device_id={}, '\ - 'signal_strength={}, '\ - 'rssi={}'\ - ')' + string = "{}.{}(device_id={}, signal_strength={}, rssi={})" return string.format( self.__class__.__module__, self.__class__.__name__, @@ -1062,9 +1066,9 @@ def _from_raw(cls, data: dict) -> NetworkStatusCloudConnector: # Construct the object with unpacked parameters. obj = cls( - device_id=data['id'], - signal_strength=data['signalStrength'], - rssi=data['rssi'], + device_id=data["id"], + signal_strength=data["signalStrength"], + rssi=data["rssi"], ) # Re-inherit from parent, but now providing response data. @@ -1095,15 +1099,14 @@ class NetworkStatus(_EventData): """ - def __init__(self, - signal_strength: Optional[int] = None, - rssi: Optional[int] = None, - transmission_mode: Optional[str] = None, - cloud_connectors: - Optional[list[NetworkStatusCloudConnector]] = None, - timestamp: Optional[datetime | str] = None, - ) -> None: - + def __init__( + self, + signal_strength: Optional[int] = None, + rssi: Optional[int] = None, + transmission_mode: Optional[str] = None, + cloud_connectors: Optional[list[NetworkStatusCloudConnector]] = None, + timestamp: Optional[datetime | str] = None, + ) -> None: """ Constructs the NetworkStatus object. @@ -1130,21 +1133,24 @@ def __init__(self, self.signal_strength: Optional[int] = signal_strength self.rssi: Optional[int] = rssi self.transmission_mode: Optional[str] = transmission_mode - self.cloud_connectors: Optional[list[NetworkStatusCloudConnector]] = \ + self.cloud_connectors: Optional[list[NetworkStatusCloudConnector]] = ( cloud_connectors + ) self.timestamp: Optional[datetime | str] = timestamp # Inherit parent _EventData class init with repacked data dictionary. - _EventData.__init__(self, self.__repack(), 'networkStatus') + _EventData.__init__(self, self.__repack(), "networkStatus") def __repr__(self) -> str: - string = '{}.{}('\ - 'signal_strength={}, '\ - 'rssi={}, '\ - 'transmission_mode={}, '\ - 'cloud_connectors={}, '\ - 'timestamp={}'\ - ')' + string = ( + "{}.{}(" + "signal_strength={}, " + "rssi={}, " + "transmission_mode={}, " + "cloud_connectors={}, " + "timestamp={}" + ")" + ) return string.format( self.__class__.__module__, self.__class__.__name__, @@ -1174,18 +1180,18 @@ def _from_raw(cls, data: dict) -> NetworkStatus: # Isolate list of NetworkStatusCloudConnector objects. cloud_connectors = [] - for ccon in data['cloudConnectors']: + for ccon in data["cloudConnectors"]: cloud_connectors.append( NetworkStatusCloudConnector._from_raw(ccon) ) # Construct the object with unpacked parameters. 
obj = cls( - signal_strength=data['signalStrength'], - rssi=data['rssi'], - transmission_mode=data['transmissionMode'], + signal_strength=data["signalStrength"], + rssi=data["rssi"], + transmission_mode=data["transmissionMode"], cloud_connectors=cloud_connectors, - timestamp=data['updateTime'], + timestamp=data["updateTime"], ) # Re-inherit from parent, but now providing response data. @@ -1196,23 +1202,23 @@ def _from_raw(cls, data: dict) -> NetworkStatus: def __repack(self) -> dict: data: dict = dict() if self.signal_strength is not None: - data['signalStrength'] = self.signal_strength + data["signalStrength"] = self.signal_strength if self.rssi is not None: - data['rssi'] = self.rssi + data["rssi"] = self.rssi if self.transmission_mode is not None: - data['transmissionMode'] = self.transmission_mode + data["transmissionMode"] = self.transmission_mode if self.timestamp is not None: - data['updateTime'] = self.timestamp + data["updateTime"] = self.timestamp if self.cloud_connectors is not None: - data['cloud_connectors'] = [] + data["cloud_connectors"] = [] for ccon in self.cloud_connectors: ccon_data: dict = dict() if ccon.device_id is not None: - ccon_data['id'] = ccon.device_id + ccon_data["id"] = ccon.device_id if ccon.signal_strength is not None: - ccon_data['signalStrength'] = ccon.signal_strength + ccon_data["signalStrength"] = ccon.signal_strength if len(ccon_data) > 0: - data['cloud_connectors'].append(ccon_data) + data["cloud_connectors"].append(ccon_data) return data @@ -1229,10 +1235,11 @@ class BatteryStatus(_EventData): """ - def __init__(self, - percentage: int, - timestamp: Optional[datetime | str] = None, - ) -> None: + def __init__( + self, + percentage: int, + timestamp: Optional[datetime | str] = None, + ) -> None: """ Constructs the Temperature object. @@ -1251,13 +1258,10 @@ def __init__(self, self.timestamp: Optional[datetime | str] = timestamp # Inherit parent _EventData class init with repacked data dictionary. - _EventData.__init__(self, self.__repack(), 'batteryStatus') + _EventData.__init__(self, self.__repack(), "batteryStatus") def __repr__(self) -> str: - string = '{}.{}('\ - 'percentage={}, '\ - 'timestamp={}'\ - ')' + string = "{}.{}(percentage={}, timestamp={})" return string.format( self.__class__.__module__, self.__class__.__name__, @@ -1284,8 +1288,8 @@ def _from_raw(cls, data: dict) -> BatteryStatus: # Construct the object with unpacked parameters. obj = cls( - percentage=data['percentage'], - timestamp=data['updateTime'], + percentage=data["percentage"], + timestamp=data["updateTime"], ) # Re-inherit from parent, but now providing response data. @@ -1296,9 +1300,9 @@ def _from_raw(cls, data: dict) -> BatteryStatus: def __repack(self) -> dict: data: dict = dict() if self.percentage is not None: - data['percentage'] = self.percentage + data["percentage"] = self.percentage if self.timestamp is not None: - data['updateTime'] = self.timestamp + data["updateTime"] = self.timestamp return data @@ -1319,12 +1323,13 @@ class LabelsChanged(_EventData): """ - def __init__(self, - added: dict[str, str], - modified: dict[str, str], - removed: list[str], - timestamp: Optional[datetime | str] = None, - ): + def __init__( + self, + added: dict[str, str], + modified: dict[str, str], + removed: list[str], + timestamp: Optional[datetime | str] = None, + ): """ Constructs the LabelsChanged object. @@ -1349,15 +1354,10 @@ def __init__(self, self.timestamp: Optional[datetime | str] = timestamp # Inherit parent _EventData class init with repacked data dictionary. 
- _EventData.__init__(self, self.__repack(), 'labelsChanged') + _EventData.__init__(self, self.__repack(), "labelsChanged") def __repr__(self) -> str: - string = '{}.{}('\ - 'added={}, '\ - 'modified={}, '\ - 'removed={}, '\ - 'timestamp={}'\ - ')' + string = "{}.{}(added={}, modified={}, removed={}, timestamp={})" return string.format( self.__class__.__module__, self.__class__.__name__, @@ -1386,10 +1386,10 @@ def _from_raw(cls, data: dict) -> LabelsChanged: # Construct the object with unpacked parameters. obj = cls( - added=data['added'], - modified=data['modified'], - removed=data['removed'], - timestamp=data['updateTime'], + added=data["added"], + modified=data["modified"], + removed=data["removed"], + timestamp=data["updateTime"], ) # Re-inherit from parent, but now providing response data. @@ -1400,13 +1400,13 @@ def _from_raw(cls, data: dict) -> LabelsChanged: def __repack(self) -> dict: data: dict = dict() if self.added is not None: - data['added'] = self.added + data["added"] = self.added if self.modified is not None: - data['modified'] = self.modified + data["modified"] = self.modified if self.removed is not None: - data['removed'] = self.removed + data["removed"] = self.removed if self.timestamp is not None: - data['updateTime'] = self.timestamp + data["updateTime"] = self.timestamp return data @@ -1430,16 +1430,17 @@ class ConnectionStatus(_EventData): """ - CONNECTION_SDS: str = 'SDS' - CONNECTION_ETHERNET: str = 'ETHERNET' - CONNECTION_CELLULAR: str = 'CELLULAR' - CONNECTION_OFFLINE: str = 'OFFLINE' + CONNECTION_SDS: str = "SDS" + CONNECTION_ETHERNET: str = "ETHERNET" + CONNECTION_CELLULAR: str = "CELLULAR" + CONNECTION_OFFLINE: str = "OFFLINE" - def __init__(self, - connection: str, - available: list[str], - timestamp: Optional[datetime | str] = None, - ): + def __init__( + self, + connection: str, + available: list[str], + timestamp: Optional[datetime | str] = None, + ): """ Constructs the ConnectionStatus object. @@ -1465,14 +1466,10 @@ def __init__(self, self.timestamp: Optional[datetime | str] = timestamp # Inherit parent _EventData class init with repacked data dictionary. - _EventData.__init__(self, self.__repack(), 'connectionStatus') + _EventData.__init__(self, self.__repack(), "connectionStatus") def __repr__(self) -> str: - string = '{}.{}('\ - 'connection={}, '\ - 'available={}, '\ - 'timestamp={}'\ - ')' + string = "{}.{}(connection={}, available={}, timestamp={})" return string.format( self.__class__.__module__, self.__class__.__name__, @@ -1500,9 +1497,9 @@ def _from_raw(cls, data: dict) -> ConnectionStatus: # Construct the object with unpacked parameters. obj = cls( - connection=data['connection'], - available=data['available'], - timestamp=data['updateTime'], + connection=data["connection"], + available=data["available"], + timestamp=data["updateTime"], ) # Re-inherit from parent, but now providing response data. 
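ConnectionStatus keeps its `CONNECTION_*` constants and their values; only the string quoting changes. A brief sketch of comparing event data against them, using hypothetical values rather than a real API payload:

```python
from disruptive.events.events import ConnectionStatus

# Hypothetical status; real values arrive in connectionStatus events.
status = ConnectionStatus(
    connection=ConnectionStatus.CONNECTION_ETHERNET,
    available=[
        ConnectionStatus.CONNECTION_ETHERNET,
        ConnectionStatus.CONNECTION_CELLULAR,
    ],
)

if status.connection == ConnectionStatus.CONNECTION_OFFLINE:
    print("Cloud Connector is offline.")
else:
    print(f"Connected over {status.connection}.")
```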
@@ -1513,11 +1510,11 @@ def _from_raw(cls, data: dict) -> ConnectionStatus: def __repack(self) -> dict: data: dict = dict() if self.connection is not None: - data['connection'] = self.connection + data["connection"] = self.connection if self.available is not None: - data['available'] = self.available + data["available"] = self.available if self.timestamp is not None: - data['updateTime'] = self.timestamp + data["updateTime"] = self.timestamp return data @@ -1536,11 +1533,12 @@ class EthernetStatus(_EventData): """ - def __init__(self, - mac_address: str, - ip_address: str, - timestamp: Optional[datetime | str] = None, - ): + def __init__( + self, + mac_address: str, + ip_address: str, + timestamp: Optional[datetime | str] = None, + ): """ Constructs the EthernetStatus object, inheriting parent class and setting the type-specific attributes. @@ -1563,14 +1561,10 @@ def __init__(self, self.timestamp: Optional[datetime | str] = timestamp # Inherit parent _EventData class init with repacked data dictionary. - _EventData.__init__(self, self.__repack(), 'ethernetStatus') + _EventData.__init__(self, self.__repack(), "ethernetStatus") def __repr__(self) -> str: - string = '{}.{}('\ - 'mac_address={}, '\ - 'ip_address={}, '\ - 'timestamp={}'\ - ')' + string = "{}.{}(mac_address={}, ip_address={}, timestamp={})" return string.format( self.__class__.__module__, self.__class__.__name__, @@ -1598,9 +1592,9 @@ def _from_raw(cls, data: dict) -> EthernetStatus: # Construct the object with unpacked parameters. obj = cls( - mac_address=data['macAddress'], - ip_address=data['ipAddress'], - timestamp=data['updateTime'], + mac_address=data["macAddress"], + ip_address=data["ipAddress"], + timestamp=data["updateTime"], ) # Re-inherit from parent, but now providing response data. @@ -1611,11 +1605,11 @@ def _from_raw(cls, data: dict) -> EthernetStatus: def __repack(self) -> dict: data: dict = dict() if self.mac_address is not None: - data['macAddress'] = self.mac_address + data["macAddress"] = self.mac_address if self.ip_address is not None: - data['ipAddress'] = self.ip_address + data["ipAddress"] = self.ip_address if self.timestamp is not None: - data['updateTime'] = self.timestamp + data["updateTime"] = self.timestamp return data @@ -1632,10 +1626,11 @@ class CellularStatus(_EventData): """ - def __init__(self, - signal_strength: int, - timestamp: Optional[datetime | str] = None, - ): + def __init__( + self, + signal_strength: int, + timestamp: Optional[datetime | str] = None, + ): """ Constructs the Temperature object. @@ -1654,13 +1649,10 @@ def __init__(self, self.timestamp: Optional[datetime | str] = timestamp # Inherit parent _EventData class init with repacked data dictionary. - _EventData.__init__(self, self.__repack(), 'cellularStatus') + _EventData.__init__(self, self.__repack(), "cellularStatus") def __repr__(self) -> str: - string = '{}.{}('\ - 'signal_strength={}, '\ - 'timestamp={}'\ - ')' + string = "{}.{}(signal_strength={}, timestamp={})" return string.format( self.__class__.__module__, self.__class__.__name__, @@ -1687,8 +1679,8 @@ def _from_raw(cls, data: dict) -> CellularStatus: # Construct the object with unpacked parameters. obj = cls( - signal_strength=data['signalStrength'], - timestamp=data['updateTime'], + signal_strength=data["signalStrength"], + timestamp=data["updateTime"], ) # Re-inherit from parent using a full raw dictionary. 
@@ -1699,9 +1691,9 @@ def _from_raw(cls, data: dict) -> CellularStatus: def __repack(self) -> dict: data: dict = dict() if self.signal_strength is not None: - data['signalStrength'] = self.signal_strength + data["signalStrength"] = self.signal_strength if self.timestamp is not None: - data['updateTime'] = self.timestamp + data["updateTime"] = self.timestamp return data @@ -1718,10 +1710,11 @@ class Co2(_EventData): """ - def __init__(self, - ppm: int, - timestamp: Optional[datetime | str] = None, - ) -> None: + def __init__( + self, + ppm: int, + timestamp: Optional[datetime | str] = None, + ) -> None: """ Constructs the Co2 object. @@ -1740,13 +1733,10 @@ def __init__(self, self.timestamp: Optional[datetime | str] = timestamp # Inherit parent _EventData class init with repacked data dictionary. - _EventData.__init__(self, self.__repack(), 'co2') + _EventData.__init__(self, self.__repack(), "co2") def __repr__(self) -> str: - string = '{}.{}('\ - 'ppm={}, '\ - 'timestamp={}'\ - ')' + string = "{}.{}(ppm={}, timestamp={})" return string.format( self.__class__.__module__, self.__class__.__name__, @@ -1773,8 +1763,8 @@ def _from_raw(cls, data: dict) -> Co2: # Construct the object with unpacked parameters. obj = cls( - ppm=data['ppm'], - timestamp=data['updateTime'], + ppm=data["ppm"], + timestamp=data["updateTime"], ) # Re-inherit from parent, but now providing response data. @@ -1785,9 +1775,9 @@ def _from_raw(cls, data: dict) -> Co2: def __repack(self) -> dict: data: dict = dict() if self.ppm is not None: - data['ppm'] = self.ppm + data["ppm"] = self.ppm if self.timestamp is not None: - data['updateTime'] = self.timestamp + data["updateTime"] = self.timestamp return data @@ -1804,10 +1794,11 @@ class Pressure(_EventData): """ - def __init__(self, - pascal: float, - timestamp: Optional[datetime | str] = None, - ) -> None: + def __init__( + self, + pascal: float, + timestamp: Optional[datetime | str] = None, + ) -> None: """ Constructs the Pressure object. @@ -1826,13 +1817,10 @@ def __init__(self, self.timestamp: Optional[datetime | str] = timestamp # Inherit parent _EventData class init with repacked data dictionary. - _EventData.__init__(self, self.__repack(), 'pressure') + _EventData.__init__(self, self.__repack(), "pressure") def __repr__(self) -> str: - string = '{}.{}('\ - 'pascal={}, '\ - 'timestamp={}'\ - ')' + string = "{}.{}(pascal={}, timestamp={})" return string.format( self.__class__.__module__, self.__class__.__name__, @@ -1859,8 +1847,8 @@ def _from_raw(cls, data: dict) -> Pressure: # Construct the object with unpacked parameters. obj = cls( - pascal=data['pascal'], - timestamp=data['updateTime'], + pascal=data["pascal"], + timestamp=data["updateTime"], ) # Re-inherit from parent, but now providing response data. 
@@ -1871,9 +1859,9 @@ def _from_raw(cls, data: dict) -> Pressure: def __repack(self) -> dict: data: dict = dict() if self.pascal is not None: - data['pascal'] = self.pascal + data["pascal"] = self.pascal if self.timestamp is not None: - data['updateTime'] = self.timestamp + data["updateTime"] = self.timestamp return data @@ -1890,13 +1878,14 @@ class Motion(_EventData): """ - STATE_MOTION_DETECTED = 'MOTION_DETECTED' - STATE_NO_MOTION_DETECTED = 'NO_MOTION_DETECTED' + STATE_MOTION_DETECTED = "MOTION_DETECTED" + STATE_NO_MOTION_DETECTED = "NO_MOTION_DETECTED" - def __init__(self, - state: str, - timestamp: Optional[datetime | str] = None, - ) -> None: + def __init__( + self, + state: str, + timestamp: Optional[datetime | str] = None, + ) -> None: """ Constructs the Motion object, inheriting parent class and setting the type-specific attributes. @@ -1916,13 +1905,10 @@ def __init__(self, self.timestamp: Optional[datetime | str] = timestamp # Inherit parent _EventData class init with repacked data dictionary. - _EventData.__init__(self, self.__repack(), 'motion') + _EventData.__init__(self, self.__repack(), "motion") def __repr__(self) -> str: - string = '{}.{}('\ - 'state={}, '\ - 'timestamp={}'\ - ')' + string = "{}.{}(state={}, timestamp={})" return string.format( self.__class__.__module__, self.__class__.__name__, @@ -1949,8 +1935,8 @@ def _from_raw(cls, data: dict) -> Motion: # Construct the object with unpacked parameters. obj = cls( - state=data['state'], - timestamp=data['updateTime'], + state=data["state"], + timestamp=data["updateTime"], ) # Re-inherit from parent, but now providing response data. @@ -1961,9 +1947,9 @@ def _from_raw(cls, data: dict) -> Motion: def __repack(self) -> dict: data: dict = dict() if self.state is not None: - data['state'] = self.state + data["state"] = self.state if self.timestamp is not None: - data['updateTime'] = self.timestamp + data["updateTime"] = self.timestamp return data @@ -1984,14 +1970,15 @@ class DeskOccupancy(_EventData): """ - STATE_OCCUPIED = 'OCCUPIED' - STATE_NOT_OCCUPIED = 'NOT_OCCUPIED' + STATE_OCCUPIED = "OCCUPIED" + STATE_NOT_OCCUPIED = "NOT_OCCUPIED" - def __init__(self, - state: str, - timestamp: Optional[datetime | str] = None, - remarks: Optional[list[str]] = None, - ) -> None: + def __init__( + self, + state: str, + timestamp: Optional[datetime | str] = None, + remarks: Optional[list[str]] = None, + ) -> None: """ Constructs the DeskOccupancy object, inheriting parent class and setting the type-specific attributes. @@ -2016,14 +2003,10 @@ def __init__(self, self.remarks: Optional[list[str]] = remarks # Inherit parent _EventData class init with repacked data dictionary. - _EventData.__init__(self, self.__repack(), 'deskOccupancy') + _EventData.__init__(self, self.__repack(), "deskOccupancy") def __repr__(self) -> str: - string = '{}.{}('\ - 'state={}, '\ - 'timestamp={}, '\ - 'remarks={}'\ - ')' + string = "{}.{}(state={}, timestamp={}, remarks={})" return string.format( self.__class__.__module__, self.__class__.__name__, @@ -2051,9 +2034,9 @@ def _from_raw(cls, data: dict) -> DeskOccupancy: # Construct the object with unpacked parameters. obj = cls( - state=data['state'], - timestamp=data['updateTime'], - remarks=data['remarks'], + state=data["state"], + timestamp=data["updateTime"], + remarks=data["remarks"], ) # Re-inherit from parent, but now providing response data. 
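Motion and DeskOccupancy follow the same pattern, with their `STATE_*` constants unchanged in value. A minimal sketch with a hypothetical reading; real Motion objects are deserialized from motion events returned by the API:

```python
from disruptive.events.events import Motion

# Hypothetical reading for illustration only.
motion = Motion(state=Motion.STATE_MOTION_DETECTED)
print(motion.state == Motion.STATE_NO_MOTION_DETECTED)  # False
```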
@@ -2064,11 +2047,11 @@ def _from_raw(cls, data: dict) -> DeskOccupancy: def __repack(self) -> dict: data: dict = dict() if self.state is not None: - data['state'] = self.state + data["state"] = self.state if self.timestamp is not None: - data['updateTime'] = self.timestamp + data["updateTime"] = self.timestamp if self.remarks is not None: - data['remarks'] = self.remarks + data["remarks"] = self.remarks return data @@ -2085,13 +2068,14 @@ class Contact(_EventData): """ - STATE_CLOSED: str = 'CLOSED' - STATE_OPEN: str = 'OPEN' + STATE_CLOSED: str = "CLOSED" + STATE_OPEN: str = "OPEN" - def __init__(self, - state: str, - timestamp: Optional[datetime | str] = None, - ) -> None: + def __init__( + self, + state: str, + timestamp: Optional[datetime | str] = None, + ) -> None: """ Constructs the Contact object, inheriting parent class and setting the type-specific attributes. @@ -2110,13 +2094,10 @@ def __init__(self, self.timestamp: Optional[datetime | str] = timestamp # Inherit parent _EventData class init with repacked data dictionary. - _EventData.__init__(self, self.__repack(), 'contact') + _EventData.__init__(self, self.__repack(), "contact") def __repr__(self) -> str: - string = '{}.{}('\ - 'state={}, '\ - 'timestamp={}, '\ - ')' + string = "{}.{}(state={}, timestamp={}, )" return string.format( self.__class__.__module__, self.__class__.__name__, @@ -2143,8 +2124,8 @@ def _from_raw(cls, data: dict) -> Contact: # Construct the object with unpacked parameters. obj = cls( - state=data['state'], - timestamp=data['updateTime'], + state=data["state"], + timestamp=data["updateTime"], ) # Re-inherit from parent, but now providing response data. @@ -2155,9 +2136,9 @@ def _from_raw(cls, data: dict) -> Contact: def __repack(self) -> dict: data: dict = dict() if self.state is not None: - data['state'] = self.state + data["state"] = self.state if self.timestamp is not None: - data['updateTime'] = self.timestamp + data["updateTime"] = self.timestamp return data @@ -2172,17 +2153,19 @@ class ProbeWireStatus(_EventData): """ - STATE_INVALID_WIRE_CONFIGURATION: str = 'INVALID_WIRE_CONFIGURATION' - STATE_INVALID_COEFFICIENT_CONFIGURATION: str \ - = 'INVALID_COEFFICIENT_CONFIGURATION' - STATE_TWO_WIRE: str = 'TWO_WIRE' - STATE_THREE_WIRE: str = 'THREE_WIRE' - STATE_FOUR_WIRE: str = 'FOUR_WIRE' + STATE_INVALID_WIRE_CONFIGURATION: str = "INVALID_WIRE_CONFIGURATION" + STATE_INVALID_COEFFICIENT_CONFIGURATION: str = ( + "INVALID_COEFFICIENT_CONFIGURATION" + ) + STATE_TWO_WIRE: str = "TWO_WIRE" + STATE_THREE_WIRE: str = "THREE_WIRE" + STATE_FOUR_WIRE: str = "FOUR_WIRE" - def __init__(self, - state: str, - timestamp: Optional[datetime | str] = None, - ) -> None: + def __init__( + self, + state: str, + timestamp: Optional[datetime | str] = None, + ) -> None: """ Constructs the ProbeWireStatus object, inheriting parent class and setting the type-specific attributes. @@ -2203,13 +2186,10 @@ def __init__(self, self.timestamp: Optional[datetime | str] = timestamp # Inherit parent _EventData class init with repacked data dictionary. - _EventData.__init__(self, self.__repack(), 'probeWireStatus') + _EventData.__init__(self, self.__repack(), "probeWireStatus") def __repr__(self) -> str: - string = '{}.{}('\ - 'state={}, '\ - 'timestamp={}, '\ - ')' + string = "{}.{}(state={}, timestamp={}, )" return string.format( self.__class__.__module__, self.__class__.__name__, @@ -2236,8 +2216,8 @@ def _from_raw(cls, data: dict) -> ProbeWireStatus: # Construct the object with unpacked parameters. 
obj = cls( - state=data['state'], - timestamp=data['updateTime'], + state=data["state"], + timestamp=data["updateTime"], ) # Re-inherit from parent, but now providing response data. @@ -2248,9 +2228,9 @@ def _from_raw(cls, data: dict) -> ProbeWireStatus: def __repack(self) -> dict: data: dict = dict() if self.state is not None: - data['state'] = self.state + data["state"] = self.state if self.timestamp is not None: - data['updateTime'] = self.timestamp + data["updateTime"] = self.timestamp return data @@ -2280,20 +2260,20 @@ def __init__(self, event: dict): dtoutputs.OutputBase.__init__(self, event) # Unpack attributes from dictionary. - self.event_id: str = event['eventId'] - self.event_type: str = event['eventType'] - self.device_id: str = event['targetName'].split('/')[-1] - self.project_id: str = event['targetName'].split('/')[1] + self.event_id: str = event["eventId"] + self.event_type: str = event["eventType"] + self.device_id: str = event["targetName"].split("/")[-1] + self.project_id: str = event["targetName"].split("/")[1] # Since labelsChanged is the only event that does not # contain an updateTime field in data, we provide the # field as it is a massive convenience boost. - if self.event_type == 'labelsChanged': - event['data']['updateTime'] = event['timestamp'] + if self.event_type == "labelsChanged": + event["data"]["updateTime"] = event["timestamp"] # Initialize the appropriate data class. self.data = _EventData.from_event_type( - event['data'], + event["data"], self.event_type, ) @@ -2325,134 +2305,133 @@ def from_mixed_list(cls, events: list[dict]) -> list[Event]: return object_list -class __EventsMap(): - - class __TypeNames(): - - def __init__(self, - api_name: str, - attr_name: str, - class_name: str, - is_keyed: bool - ) -> None: +class __EventsMap: + class __TypeNames: + def __init__( + self, + api_name: str, + attr_name: str, + class_name: str, + is_keyed: bool, + ) -> None: self.api_name = api_name self.attr_name = attr_name self.class_name = class_name self.is_keyed = is_keyed _api_names = { - 'touch': __TypeNames( - api_name='touch', - attr_name='touch', - class_name='Touch', - is_keyed=True + "touch": __TypeNames( + api_name="touch", + attr_name="touch", + class_name="Touch", + is_keyed=True, ), - 'temperature': __TypeNames( - api_name='temperature', - attr_name='temperature', - class_name='Temperature', - is_keyed=True + "temperature": __TypeNames( + api_name="temperature", + attr_name="temperature", + class_name="Temperature", + is_keyed=True, ), - 'objectPresent': __TypeNames( - api_name='objectPresent', - attr_name='object_present', - class_name='ObjectPresent', - is_keyed=True + "objectPresent": __TypeNames( + api_name="objectPresent", + attr_name="object_present", + class_name="ObjectPresent", + is_keyed=True, ), - 'humidity': __TypeNames( - api_name='humidity', - attr_name='humidity', - class_name='Humidity', - is_keyed=True + "humidity": __TypeNames( + api_name="humidity", + attr_name="humidity", + class_name="Humidity", + is_keyed=True, ), - 'objectPresentCount': __TypeNames( - api_name='objectPresentCount', - attr_name='object_present_count', - class_name='ObjectPresentCount', - is_keyed=True + "objectPresentCount": __TypeNames( + api_name="objectPresentCount", + attr_name="object_present_count", + class_name="ObjectPresentCount", + is_keyed=True, ), - 'touchCount': __TypeNames( - api_name='touchCount', - attr_name='touch_count', - class_name='TouchCount', - is_keyed=True + "touchCount": __TypeNames( + api_name="touchCount", + attr_name="touch_count", + 
class_name="TouchCount", + is_keyed=True, ), - 'waterPresent': __TypeNames( - api_name='waterPresent', - attr_name='water_present', - class_name='WaterPresent', - is_keyed=True + "waterPresent": __TypeNames( + api_name="waterPresent", + attr_name="water_present", + class_name="WaterPresent", + is_keyed=True, ), - 'networkStatus': __TypeNames( - api_name='networkStatus', - attr_name='network_status', - class_name='NetworkStatus', + "networkStatus": __TypeNames( + api_name="networkStatus", + attr_name="network_status", + class_name="NetworkStatus", is_keyed=True, ), - 'batteryStatus': __TypeNames( - api_name='batteryStatus', - attr_name='battery_status', - class_name='BatteryStatus', + "batteryStatus": __TypeNames( + api_name="batteryStatus", + attr_name="battery_status", + class_name="BatteryStatus", is_keyed=True, ), - 'labelsChanged': __TypeNames( - api_name='labelsChanged', - attr_name='labels_changed', - class_name='LabelsChanged', + "labelsChanged": __TypeNames( + api_name="labelsChanged", + attr_name="labels_changed", + class_name="LabelsChanged", is_keyed=False, ), - 'connectionStatus': __TypeNames( - api_name='connectionStatus', - attr_name='connection_status', - class_name='ConnectionStatus', + "connectionStatus": __TypeNames( + api_name="connectionStatus", + attr_name="connection_status", + class_name="ConnectionStatus", is_keyed=True, ), - 'ethernetStatus': __TypeNames( - api_name='ethernetStatus', - attr_name='ethernet_status', - class_name='EthernetStatus', + "ethernetStatus": __TypeNames( + api_name="ethernetStatus", + attr_name="ethernet_status", + class_name="EthernetStatus", is_keyed=True, ), - 'cellularStatus': __TypeNames( - api_name='cellularStatus', - attr_name='cellular_status', - class_name='CellularStatus', + "cellularStatus": __TypeNames( + api_name="cellularStatus", + attr_name="cellular_status", + class_name="CellularStatus", is_keyed=True, ), - 'co2': __TypeNames( - api_name='co2', - attr_name='co2', - class_name='Co2', + "co2": __TypeNames( + api_name="co2", + attr_name="co2", + class_name="Co2", is_keyed=True, ), - 'pressure': __TypeNames( - api_name='pressure', - attr_name='pressure', - class_name='Pressure', + "pressure": __TypeNames( + api_name="pressure", + attr_name="pressure", + class_name="Pressure", is_keyed=True, ), - 'motion': __TypeNames( - api_name='motion', - attr_name='motion', - class_name='Motion', + "motion": __TypeNames( + api_name="motion", + attr_name="motion", + class_name="Motion", is_keyed=True, ), - 'deskOccupancy': __TypeNames( - api_name='deskOccupancy', - attr_name='desk_occupancy', - class_name='DeskOccupancy', + "deskOccupancy": __TypeNames( + api_name="deskOccupancy", + attr_name="desk_occupancy", + class_name="DeskOccupancy", is_keyed=True, ), - 'contact': __TypeNames( - api_name='contact', - attr_name='contact', - class_name='Contact', + "contact": __TypeNames( + api_name="contact", + attr_name="contact", + class_name="Contact", is_keyed=True, ), - 'probeWireStatus': __TypeNames( - api_name='probeWireStatus', - attr_name='probe_wire_status', - class_name='ProbeWireStatus', + "probeWireStatus": __TypeNames( + api_name="probeWireStatus", + attr_name="probe_wire_status", + class_name="ProbeWireStatus", is_keyed=True, ), } @@ -2461,8 +2440,22 @@ def __init__(self, _EVENTS_MAP = __EventsMap() _EventType = Union[ - Touch, Temperature, ObjectPresent, Humidity, ObjectPresentCount, - TouchCount, WaterPresent, NetworkStatus, NetworkStatusCloudConnector, - BatteryStatus, LabelsChanged, ConnectionStatus, EthernetStatus, - CellularStatus, 
Co2, Pressure, Motion, DeskOccupancy, + Touch, + Temperature, + ObjectPresent, + Humidity, + ObjectPresentCount, + TouchCount, + WaterPresent, + NetworkStatus, + NetworkStatusCloudConnector, + BatteryStatus, + LabelsChanged, + ConnectionStatus, + EthernetStatus, + CellularStatus, + Co2, + Pressure, + Motion, + DeskOccupancy, ] diff --git a/disruptive/logging.py b/disruptive/logging.py index 807fd91..928c8d9 100644 --- a/disruptive/logging.py +++ b/disruptive/logging.py @@ -6,15 +6,15 @@ import disruptive import disruptive.errors as dterrors -DEBUG = 'DEBUG' -INFO = 'INFO' -WARNING = 'WARNING' -ERROR = 'ERROR' -CRITICAL = 'CRITICAL' +DEBUG = "DEBUG" +INFO = "INFO" +WARNING = "WARNING" +ERROR = "ERROR" +CRITICAL = "CRITICAL" LOG_LEVELS = [DEBUG, INFO, WARNING, ERROR, CRITICAL] # Fetch the disruptive logger, but with disabled output. -logger = logging.getLogger('disruptive') +logger = logging.getLogger("disruptive") logger.setLevel(99) @@ -61,8 +61,9 @@ def _log_flag_exceeds(level: str) -> bool: # to default before raising the exception. disruptive.log_level = INFO - msg = f'Invalid log_level {set_level}.\n' \ - f'Must be either of {LOG_LEVELS}.' + msg = ( + f"Invalid log_level {set_level}.\nMust be either of {LOG_LEVELS}." + ) raise dterrors.ConfigurationError(msg) # Check if level is exceeded. @@ -73,4 +74,4 @@ def _log_flag_exceeds(level: str) -> bool: def _fmt_log(msg: str | dict, level: str) -> None: - print(f'[{datetime.now().isoformat()}] {level:<8} - {msg}') + print(f"[{datetime.now().isoformat()}] {level:<8} - {msg}") diff --git a/disruptive/outputs.py b/disruptive/outputs.py index 44fa401..40d915f 100644 --- a/disruptive/outputs.py +++ b/disruptive/outputs.py @@ -30,7 +30,7 @@ def __init__(self, raw: dict) -> None: self.raw: dict = raw def __repr__(self) -> str: - return '{}.{}({})'.format( + return "{}.{}({})".format( self.__class__.__module__, self.__class__.__name__, self._raw, @@ -38,26 +38,26 @@ def __repr__(self) -> str: def __str__(self) -> str: out = self.__str__recursive([], self, level=0) - return '\n'.join(out) + return "\n".join(out) def __str__recursive(self, out: list, obj: object, level: int) -> list: # Set the indent level for formatting. n_spaces = 4 - l0 = level*' '*n_spaces - l1 = (level+1)*' '*n_spaces - l2 = (level+2)*' '*n_spaces + l0 = level * " " * n_spaces + l1 = (level + 1) * " " * n_spaces + l2 = (level + 2) * " " * n_spaces # At first recursive depth, print object name. if level == 0: - out.append(l0 + str(obj.__class__.__name__) + '(') + out.append(l0 + str(obj.__class__.__name__) + "(") # Append the various public attributes recursively. for a in vars(obj): # Skip private attributes. - if a.startswith('_'): + if a.startswith("_"): continue # Also skip raw as it would only be duplicate information. - elif a == 'raw': + elif a == "raw": continue # Fetch and evaluate the attribute value / type. @@ -65,50 +65,59 @@ def __str__recursive(self, out: list, obj: object, level: int) -> list: # Class objects should be dumped recursively, except for # those that are an instance of datetime, like pandas timestamps. - if hasattr(val, '__dict__') and not isinstance(val, datetime): + if hasattr(val, "__dict__") and not isinstance(val, datetime): # Other classes should print name with content recursively. 
- out.append('{}{}: {} = {}'.format( - l1, a, type(val).__name__, - str(val.__class__.__name__) + '(')) - self.__str__recursive(out, val, level=level+1) + out.append( + "{}{}: {} = {}".format( + l1, + a, + type(val).__name__, + str(val.__class__.__name__) + "(", + ) + ) + self.__str__recursive(out, val, level=level + 1) # Lists content should be iterated through. elif isinstance(val, list): - out.append('{}{}: {} = {}'.format( - l1, a, type(val).__name__, '[')) - self.__str__list(out, val, level+1, l2) - out.append(l1 + '],') + out.append( + "{}{}: {} = {}".format(l1, a, type(val).__name__, "[") + ) + self.__str__list(out, val, level + 1, l2) + out.append(l1 + "],") # Other types can be printed directly. else: - out.append('{}{}: {} = {},'.format( - l1, a, type(val).__name__, str(val) - )) + out.append( + "{}{}: {} = {},".format( + l1, a, type(val).__name__, str(val) + ) + ) # At the end of each recursive depth, end object paranthesis. - out.append(l0 + '),') + out.append(l0 + "),") return out - def __str__list(self, - out: list, - lst: list, - level: int, - l1: str, - ) -> list: + def __str__list( + self, + out: list, + lst: list, + level: int, + l1: str, + ) -> list: for val in lst: # Class objects should be dumped recursively. - if hasattr(val, '__dict__'): - out.append('{}{}'.format( - l1, str(val.__class__.__name__) + '(' - )) - self.__str__recursive(out, val, level=level+2) + if hasattr(val, "__dict__"): + out.append( + "{}{}".format(l1, str(val.__class__.__name__) + "(") + ) + self.__str__recursive(out, val, level=level + 2) # Everything else can be printed directly. else: - out.append('{}{} = {},'.format( - l1, type(val).__name__, str(val) - )) + out.append( + "{}{} = {},".format(l1, type(val).__name__, str(val)) + ) return out @@ -149,10 +158,10 @@ def __init__(self, member: dict) -> None: OutputBase.__init__(self, member) # Unpack attributes from dictionary. - self.member_id = member['name'].split('/')[-1] - self.display_name = member['displayName'] - self.roles = [r.split('/')[-1] for r in member['roles']] - self.status = member['status'] - self.email = member['email'] - self.account_type = member['accountType'] - self.create_time = dttrans.to_datetime(member['createTime']) + self.member_id = member["name"].split("/")[-1] + self.display_name = member["displayName"] + self.roles = [r.split("/")[-1] for r in member["roles"]] + self.status = member["status"] + self.email = member["email"] + self.account_type = member["accountType"] + self.create_time = dttrans.to_datetime(member["createTime"]) diff --git a/disruptive/requests.py b/disruptive/requests.py index 61c15fb..abea2eb 100644 --- a/disruptive/requests.py +++ b/disruptive/requests.py @@ -12,14 +12,13 @@ import disruptive.errors as dterrors -USER_AGENT = 'DisruptivePythonAPI/{} Python/{}'.format( +USER_AGENT = "DisruptivePythonAPI/{} Python/{}".format( dt.__version__, - f'{sys.version_info.major}.{sys.version_info.minor}', + f"{sys.version_info.major}.{sys.version_info.minor}", ) -class DTRequest(): - +class DTRequest: def __init__(self, method: str, url: str, **kwargs: Any): # Set attributes from parameters. 
self.method = method @@ -44,63 +43,63 @@ def __init__(self, method: str, url: str, **kwargs: Any): self.full_url = self.base_url + self.url def _unpack_kwargs(self, **kwargs: Any) -> None: - if 'params' in kwargs: - self.params = kwargs['params'] - if 'headers' in kwargs: - self.headers = kwargs['headers'] - if 'body' in kwargs: - self.body = kwargs['body'] - if 'data' in kwargs: - self.data = kwargs['data'] + if "params" in kwargs: + self.params = kwargs["params"] + if "headers" in kwargs: + self.headers = kwargs["headers"] + if "body" in kwargs: + self.body = kwargs["body"] + if "data" in kwargs: + self.data = kwargs["data"] # Check if request_timeout is overriden. - if 'request_timeout' in kwargs: - self.request_timeout = kwargs['request_timeout'] + if "request_timeout" in kwargs: + self.request_timeout = kwargs["request_timeout"] # Check if request_attempts is overriden. - if 'request_attempts' in kwargs: - self.request_attempts = kwargs['request_attempts'] + if "request_attempts" in kwargs: + self.request_attempts = kwargs["request_attempts"] # Check if base_url is overriden. - if 'base_url' in kwargs: - self.base_url = kwargs['base_url'] + if "base_url" in kwargs: + self.base_url = kwargs["base_url"] # Add authorization header to request except when explicitly otherwise. - if 'skip_auth' not in kwargs or kwargs['skip_auth'] is False: + if "skip_auth" not in kwargs or kwargs["skip_auth"] is False: # If provided, override the package-wide auth with provided object. - if 'auth' in kwargs: - self.headers['Authorization'] = kwargs['auth'].get_token() + if "auth" in kwargs: + self.headers["Authorization"] = kwargs["auth"].get_token() # If not, use package-wide auth object. else: - self.headers['Authorization'] = dt.default_auth.get_token() + self.headers["Authorization"] = dt.default_auth.get_token() def _sanitize_arguments(self) -> None: # Check that request_timeout > 0. if self.request_timeout <= 0: raise dterrors.ConfigurationError( - 'Configuration parameter request_timeout has value {}, but ' - 'must be float greater than 0.'.format(self.request_timeout) + "Configuration parameter request_timeout has value {}, but " + "must be float greater than 0.".format(self.request_timeout) ) # Check that request_attempts > 0. if self.request_attempts <= 0: raise dterrors.ConfigurationError( - 'Configuration parameter request_attempts has value {}, but ' - 'must be integer greater than 0.'.format(self.request_attempts) + "Configuration parameter request_attempts has value {}, but " + "must be integer greater than 0.".format(self.request_attempts) ) - def _request_wrapper(self, - method: str, - url: str, - params: dict, - headers: dict, - body: Optional[dict], - data: Optional[str], - timeout: int, - ) -> tuple[DTResponse, Any]: - + def _request_wrapper( + self, + method: str, + url: str, + params: dict, + headers: dict, + body: Optional[dict], + data: Optional[str], + timeout: int, + ) -> tuple[DTResponse, Any]: # Add custom user agent. - headers['User-Agent'] = USER_AGENT + headers["User-Agent"] = USER_AGENT # Define default response values. res = None @@ -148,10 +147,9 @@ def _send_request(self, nth_attempt: int = 0) -> dict: """ # Log the request. - dtlog.debug('Request [{}] to {}.'.format( - self.method, - self.base_url + self.url - )) + dtlog.debug( + "Request [{}] to {}.".format(self.method, self.base_url + self.url) + ) res, req_error = self._request_wrapper( method=self.method, @@ -164,9 +162,7 @@ def _send_request(self, nth_attempt: int = 0) -> dict: ) # Log the response. 
- dtlog.debug('Response [{}].'.format( - res.status_code - )) + dtlog.debug("Response [{}].".format(res.status_code)) # If _request_wrapper raised an exception, the request failed. if req_error is not None: @@ -183,19 +179,21 @@ def _send_request(self, nth_attempt: int = 0) -> dict: # Check if retry is required. if should_retry and nth_attempt < self.request_attempts: - dtlog.warning('Reconnecting in {}s.'.format(sleeptime)) + dtlog.warning("Reconnecting in {}s.".format(sleeptime)) # Sleep if necessary. if sleeptime is not None: time.sleep(sleeptime) - dtlog.info('Connection attempt {} of {}.'.format( - nth_attempt+1, - self.request_attempts, - )) + dtlog.info( + "Connection attempt {} of {}.".format( + nth_attempt + 1, + self.request_attempts, + ) + ) # Attempt the request again recursively, iterating counter. - res.data = self._send_request(nth_attempt+1) + res.data = self._send_request(nth_attempt + 1) else: # If set, raise the error chosen by dterrors.parse_error(). @@ -207,35 +205,36 @@ def _send_request(self, nth_attempt: int = 0) -> dict: @classmethod def get(cls, url: str, **kwargs: Any) -> dict: - req = cls('GET', url, **kwargs) + req = cls("GET", url, **kwargs) response: dict = req._send_request() return response @classmethod def post(cls, url: str, **kwargs: Any) -> dict: - req = cls('POST', url, **kwargs) + req = cls("POST", url, **kwargs) response: dict = req._send_request() return response @classmethod def patch(cls, url: str, **kwargs: Any) -> dict: - req = cls('PATCH', url, **kwargs) + req = cls("PATCH", url, **kwargs) response: dict = req._send_request() return response @classmethod def delete(cls, url: str, **kwargs: Any) -> dict: - req = cls('DELETE', url, **kwargs) + req = cls("DELETE", url, **kwargs) response: dict = req._send_request() return response @classmethod - def paginated_get(cls, - url: str, - pagination_key: str, - params: dict[str, str] = {}, - **kwargs: Any, - ) -> list: + def paginated_get( + cls, + url: str, + pagination_key: str, + params: dict[str, str] = {}, + **kwargs: Any, + ) -> list: # Initialize output list. results = [] @@ -244,8 +243,8 @@ def paginated_get(cls, response = cls.get(url, params=params, **kwargs) results += response[pagination_key] - if len(response['nextPageToken']) > 0: - params['pageToken'] = response['nextPageToken'] + if len(response["nextPageToken"]) > 0: + params["pageToken"] = response["nextPageToken"] else: break @@ -274,35 +273,35 @@ def stream(url: str, **kwargs: Any) -> Generator: error = None # Unpack kwargs. - params = kwargs['params'] if 'params' in kwargs else {} - headers = kwargs['headers'] if 'headers' in kwargs else {} - if 'request_attempts' in kwargs: - request_attempts = kwargs['request_attempts'] + params = kwargs["params"] if "params" in kwargs else {} + headers = kwargs["headers"] if "headers" in kwargs else {} + if "request_attempts" in kwargs: + request_attempts = kwargs["request_attempts"] else: request_attempts = dt.request_attempts # Add ping parameter to dictionary. - params['ping_interval'] = str(PING_INTERVAL) + 's' + params["ping_interval"] = str(PING_INTERVAL) + "s" # Add custom user agent. - headers['User-Agent'] = USER_AGENT + headers["User-Agent"] = USER_AGENT # Set up a simple catch-all retry policy. nth_attempt = 0 while True: try: # Set the authorization header each retry in case we expire. 
- if 'auth' in kwargs: - headers['Authorization'] = kwargs['auth'].get_token() + if "auth" in kwargs: + headers["Authorization"] = kwargs["auth"].get_token() else: - headers['Authorization'] = dt.default_auth.get_token() + headers["Authorization"] = dt.default_auth.get_token() # Set up a stream connection. # Connection will timeout and reconnect if no single event # is received in an interval of ping_interval + ping_jitter. - dtlog.info('Starting stream...') + dtlog.info("Starting stream...") stream = requests.request( - method='GET', + method="GET", url=url, stream=True, timeout=PING_INTERVAL + PING_JITTER, @@ -313,29 +312,28 @@ def stream(url: str, **kwargs: Any) -> Generator: ) if stream.encoding is None: - stream.encoding = 'utf-8' + stream.encoding = "utf-8" # Iterate through the events as they come in (one per line). for line in stream.iter_lines(decode_unicode=True): # Decode the response payload and break on error. payload = json.loads(line) - if 'result' in payload: + if "result" in payload: # Reset retry counter. nth_attempt = 0 # Check for ping event. - event = payload['result']['event'] - if event['eventType'] == 'ping': - dtlog.debug('Ping received.') + event = payload["result"]["event"] + if event["eventType"] == "ping": + dtlog.debug("Ping received.") continue # Yield event to generator. yield event - elif 'error' in payload: + elif "error" in payload: error, _, _ = dterrors.parse_api_status_code( - payload['error']['code'], - payload, None, 0 + payload["error"]["code"], payload, None, 0 ) raise error @@ -343,7 +341,7 @@ def stream(url: str, **kwargs: Any) -> Generator: raise dterrors.UnknownError(payload) # If the stream finished, but without an error, break the loop. - msg = 'Stream ended without an error.' + msg = "Stream ended without an error." raise dterrors.ConnectionError(msg) except KeyboardInterrupt: @@ -356,17 +354,19 @@ def stream(url: str, **kwargs: Any) -> Generator: elif nth_attempt < request_attempts: sleeptime = nth_attempt**2 - dtlog.warning('Reconnecting in {}s.'.format(sleeptime)) + dtlog.warning("Reconnecting in {}s.".format(sleeptime)) # Exponential backoff in sleep time. time.sleep(sleeptime) # Iterate attempt counter. nth_attempt += 1 - dtlog.info('Connection attempt {} of {}.'.format( - nth_attempt, - request_attempts, - )) + dtlog.info( + "Connection attempt {} of {}.".format( + nth_attempt, + request_attempts, + ) + ) else: # To avoid printing the entire chain of re-raised # exceptions, limit the traceback. @@ -380,17 +380,19 @@ def stream(url: str, **kwargs: Any) -> Generator: # Print the error and try again up to max_request_attempts. if nth_attempt < request_attempts and should_retry: - dtlog.warning('Reconnecting in {}s.'.format(sleeptime)) + dtlog.warning("Reconnecting in {}s.".format(sleeptime)) # Exponential backoff in sleep time. time.sleep(sleeptime) # Iterate attempt counter. 
nth_attempt += 1 - dtlog.info('Connection attempt {} of {}.'.format( - nth_attempt, - request_attempts, - )) + dtlog.info( + "Connection attempt {} of {}.".format( + nth_attempt, + request_attempts, + ) + ) else: # To avoid printing the entire chain of re-raised @@ -399,14 +401,13 @@ def stream(url: str, **kwargs: Any) -> Generator: raise error from e -class DTResponse(): - - def __init__(self, - data: dict, - status_code: Optional[int], - headers: Any, - ): - +class DTResponse: + def __init__( + self, + data: dict, + status_code: Optional[int], + headers: Any, + ): self.data = data self.status_code = status_code self.headers = headers diff --git a/disruptive/resources/claim.py b/disruptive/resources/claim.py index c8997e9..5bf46b1 100644 --- a/disruptive/resources/claim.py +++ b/disruptive/resources/claim.py @@ -20,8 +20,8 @@ class Claim(dtoutputs.OutputBase): """ - KIT = 'KIT' - DEVICE = 'DEVICE' + KIT = "KIT" + DEVICE = "DEVICE" CLAIM_ITEMS = [KIT, DEVICE] def __init__(self, claim: dict) -> None: @@ -39,16 +39,18 @@ def __init__(self, claim: dict) -> None: dtoutputs.OutputBase.__init__(self, claim) # Unpack attributes from dictionary. - self.type: str = claim['type'] - self.claimed_item: Claim.ClaimKit | Claim.ClaimDevice = \ + self.type: str = claim["type"] + self.claimed_item: Claim.ClaimKit | Claim.ClaimDevice = ( self._resolve_type(claim) + ) @classmethod - def claim_info(cls, - identifier: str, - organization_id: Optional[str] = None, - **kwargs: Any, - ) -> Claim: + def claim_info( + cls, + identifier: str, + organization_id: Optional[str] = None, + **kwargs: Any, + ) -> Claim: """ Get claim information for either a device or a kit by looking up an identifier. @@ -92,23 +94,24 @@ def claim_info(cls, """ if not isinstance(identifier, str): - raise TypeError(f'Identifier must be str, got {type(identifier)}.') + raise TypeError(f"Identifier must be str, got {type(identifier)}.") - url = f'/claimInfo?identifier={identifier}' + url = f"/claimInfo?identifier={identifier}" # Add organization resource name to url if provided. if organization_id is not None: - url += f'&organization=organizations/{organization_id}' + url += f"&organization=organizations/{organization_id}" return cls(dtrequests.DTRequest.get(url, **kwargs)) @staticmethod - def claim(target_project_id: str, - kit_ids: Optional[list[str]] = None, - device_ids: Optional[list[str]] = None, - dry_run: bool = True, - **kwargs: Any, - ) -> tuple[list[Claim.ClaimDevice], list[Exception]]: + def claim( + target_project_id: str, + kit_ids: Optional[list[str]] = None, + device_ids: Optional[list[str]] = None, + dry_run: bool = True, + **kwargs: Any, + ) -> tuple[list[Claim.ClaimDevice], list[Exception]]: """ Claim multiple kits and/or devices to your project. 
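
(Not part of the patch.) A minimal sketch of the claim lookup and claim flow shown in the surrounding hunks, assuming `dt.Claim` is exported at the package top level and credentials are already configured; identifiers are placeholders:

```python
# Illustrative only; not part of this diff. Identifiers are placeholders and
# dt.Claim as a top-level export is an assumption.
import disruptive as dt

claim = dt.Claim.claim_info(identifier="<kit-or-device-identifier>")

if claim.type == dt.Claim.KIT:
    # claimed_item is a ClaimKit: kit_id, display_name, devices.
    device_ids = [d.device_id for d in claim.claimed_item.devices]
else:
    # claimed_item is a ClaimDevice: device_id, device_type, product_number.
    device_ids = [claim.claimed_item.device_id]

# dry_run=True (the default) validates the claim without applying it.
claimed, errors = dt.Claim.claim(
    target_project_id="<project-id>",
    device_ids=device_ids,
    dry_run=True,
)
```
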
@@ -182,31 +185,31 @@ def claim(target_project_id: str, """ - url = f'/projects/{target_project_id}/devices:claim' - url += f'?dryRun={str(dry_run).lower()}' + url = f"/projects/{target_project_id}/devices:claim" + url += f"?dryRun={str(dry_run).lower()}" body = {} if kit_ids is not None: - body['kitIds'] = kit_ids + body["kitIds"] = kit_ids if device_ids is not None: - body['deviceIds'] = device_ids + body["deviceIds"] = device_ids res = dtrequests.DTRequest.post(url, body=body, **kwargs) return ( - [Claim.ClaimDevice(d) for d in res['claimedDevices']], - Claim._parse_claim_errors(res['claimErrors']), + [Claim.ClaimDevice(d) for d in res["claimedDevices"]], + Claim._parse_claim_errors(res["claimErrors"]), ) @staticmethod def _parse_claim_errors(res_errors: dict) -> list[Exception]: errors: list[Exception] = [] - for error in res_errors['devices'] + res_errors['kits']: - if error['code'] == 'ALREADY_CLAIMED': + for error in res_errors["devices"] + res_errors["kits"]: + if error["code"] == "ALREADY_CLAIMED": errors.append(dterrors.ClaimErrorDeviceAlreadyClaimed(error)) - elif error['code'] == 'NOT_FOUND' and 'deviceId' in error: + elif error["code"] == "NOT_FOUND" and "deviceId" in error: errors.append(dterrors.ClaimErrorDeviceNotFound(error)) - elif error['code'] == 'NOT_FOUND' and 'kitId' in error: + elif error["code"] == "NOT_FOUND" and "kitId" in error: errors.append(dterrors.ClaimErrorKitNotFound(error)) else: errors.append(dterrors.ClaimError(error)) @@ -228,12 +231,12 @@ def _resolve_type(self, claim: dict) -> Claim.ClaimKit | Claim.ClaimDevice: """ - if claim['type'] == Claim.KIT: - return Claim.ClaimKit(claim['kit']) - elif claim['type'] == Claim.DEVICE: - return Claim.ClaimDevice(claim['device']) + if claim["type"] == Claim.KIT: + return Claim.ClaimKit(claim["kit"]) + elif claim["type"] == Claim.DEVICE: + return Claim.ClaimDevice(claim["device"]) else: - raise KeyError(f'unknown claim type {claim["type"]}') + raise KeyError(f"unknown claim type {claim['type']}") class ClaimDevice(dtoutputs.OutputBase): """ @@ -262,10 +265,10 @@ def __init__(self, device: dict) -> None: dtoutputs.OutputBase.__init__(self, device) # Unpack attributes from raw response dictionary. - self.device_id: str = device['deviceId'] - self.device_type: str = device['deviceType'] - self.product_number: str = device['productNumber'] - self.is_claimed: bool = device['isClaimed'] + self.device_id: str = device["deviceId"] + self.device_type: str = device["deviceType"] + self.product_number: str = device["productNumber"] + self.is_claimed: bool = device["isClaimed"] class ClaimKit(dtoutputs.OutputBase): """ @@ -292,7 +295,8 @@ def __init__(self, kit: dict) -> None: dtoutputs.OutputBase.__init__(self, kit) # Unpack attributes from raw response dictionary. - self.kit_id: str = kit['kitId'] - self.display_name: str = kit['displayName'] - self.devices: list[Claim.ClaimDevice] \ - = [Claim.ClaimDevice(d) for d in kit['devices']] + self.kit_id: str = kit["kitId"] + self.display_name: str = kit["displayName"] + self.devices: list[Claim.ClaimDevice] = [ + Claim.ClaimDevice(d) for d in kit["devices"] + ] diff --git a/disruptive/resources/data_connector.py b/disruptive/resources/data_connector.py index b17548e..a6b1ad6 100644 --- a/disruptive/resources/data_connector.py +++ b/disruptive/resources/data_connector.py @@ -45,7 +45,7 @@ class DataConnector(dtoutputs.OutputBase): """ # Constants for the various Data Connector configuration types. 
- HTTP_PUSH = 'HTTP_PUSH' + HTTP_PUSH = "HTTP_PUSH" DATA_CONNECTOR_TYPES = [HTTP_PUSH] def __init__(self, data_connector: dict) -> None: @@ -64,21 +64,22 @@ def __init__(self, data_connector: dict) -> None: dtoutputs.OutputBase.__init__(self, data_connector) # Unpack attributes from dictionary. - self.data_connector_id: str = data_connector['name'].split('/')[-1] - self.project_id: str = data_connector['name'].split('/')[1] - self.status: str = data_connector['status'] - self.display_name: str = data_connector['displayName'] - self.event_types: list[str] = data_connector['events'] - self.labels: list[str] = data_connector['labels'] - self.data_connector_type: str = data_connector['type'] + self.data_connector_id: str = data_connector["name"].split("/")[-1] + self.project_id: str = data_connector["name"].split("/")[1] + self.status: str = data_connector["status"] + self.display_name: str = data_connector["displayName"] + self.event_types: list[str] = data_connector["events"] + self.labels: list[str] = data_connector["labels"] + self.data_connector_type: str = data_connector["type"] self.config = self._from_dict(data_connector) @classmethod - def get_data_connector(cls, - data_connector_id: str, - project_id: str, - **kwargs: Any, - ) -> DataConnector: + def get_data_connector( + cls, + data_connector_id: str, + project_id: str, + **kwargs: Any, + ) -> DataConnector: """ Gets the current state of a single Data Connector. @@ -108,20 +109,23 @@ def get_data_connector(cls, """ # Construct URL - url = '/projects/{}/dataconnectors/{}' + url = "/projects/{}/dataconnectors/{}" url = url.format(project_id, data_connector_id) # Return DataConnector object of GET request response. - return cls(dtrequests.DTRequest.get( - url=url, - **kwargs, - )) + return cls( + dtrequests.DTRequest.get( + url=url, + **kwargs, + ) + ) @classmethod - def list_data_connectors(cls, - project_id: str, - **kwargs: Any, - ) -> list[DataConnector]: + def list_data_connectors( + cls, + project_id: str, + **kwargs: Any, + ) -> list[DataConnector]: """ Gets a list of the current state of all Data Connectors in a project. @@ -147,22 +151,23 @@ def list_data_connectors(cls, # Return list of DataConnector objects of paginated GET response. data_connectors = dtrequests.DTRequest.paginated_get( - url='/projects/{}/dataconnectors'.format(project_id), - pagination_key='dataConnectors', + url="/projects/{}/dataconnectors".format(project_id), + pagination_key="dataConnectors", **kwargs, ) return [cls(dcon) for dcon in data_connectors] @classmethod - def create_data_connector(cls, - project_id: str, - config: disruptive.DataConnector.HttpPushConfig, - display_name: str = '', - status: str = 'ACTIVE', - event_types: list[str] = [], - labels: list[str] = [], - **kwargs: Any, - ) -> DataConnector: + def create_data_connector( + cls, + project_id: str, + config: disruptive.DataConnector.HttpPushConfig, + display_name: str = "", + status: str = "ACTIVE", + event_types: list[str] = [], + labels: list[str] = [], + **kwargs: Any, + ) -> DataConnector: """ Creates a new Data Connector in the specified project. @@ -227,26 +232,28 @@ def create_data_connector(cls, # Construct request body dictionary. body: dict = dict() - body['status'] = status - body['events'] = event_types - body['labels'] = labels + body["status"] = status + body["events"] = event_types + body["labels"] = labels if len(display_name) > 0: - body['displayName'] = display_name + body["displayName"] = display_name # Add the appropriate field depending on config. 
- body['type'] = config.data_connector_type + body["type"] = config.data_connector_type key, value = config._to_dict() body[key] = value # Construct URL. - url = '/projects/{}/dataconnectors'.format(project_id) + url = "/projects/{}/dataconnectors".format(project_id) # Return DataConnector object of POST request response. - return cls(dtrequests.DTRequest.post( - url=url, - body=body, - **kwargs, - )) + return cls( + dtrequests.DTRequest.post( + url=url, + body=body, + **kwargs, + ) + ) @classmethod def update_data_connector( @@ -322,13 +329,13 @@ def update_data_connector( # Construct request body dictionary. body: dict = dict() if display_name is not None: - body['displayName'] = display_name + body["displayName"] = display_name if status is not None: - body['status'] = status + body["status"] = status if event_types is not None: - body['events'] = event_types + body["events"] = event_types if labels is not None: - body['labels'] = labels + body["labels"] = labels # Add the appropriate field depending on config. if config is not None: @@ -336,22 +343,25 @@ def update_data_connector( body[key] = value # Construct URL. - url = '/projects/{}/dataconnectors/{}' + url = "/projects/{}/dataconnectors/{}" url = url.format(project_id, data_connector_id) # Return DataConnector object of PATCH request response. - return cls(dtrequests.DTRequest.patch( - url=url, - body=body, - **kwargs, - )) + return cls( + dtrequests.DTRequest.patch( + url=url, + body=body, + **kwargs, + ) + ) @classmethod - def delete_data_connector(cls, - data_connector_id: str, - project_id: str, - **kwargs: Any, - ) -> None: + def delete_data_connector( + cls, + data_connector_id: str, + project_id: str, + **kwargs: Any, + ) -> None: """ Deletes the specified Data Connector. @@ -376,7 +386,7 @@ def delete_data_connector(cls, """ # Construct URL. - url = '/projects/{}/dataconnectors/{}' + url = "/projects/{}/dataconnectors/{}" url = url.format(project_id, data_connector_id) # Send DELETE request, but return nothing. @@ -386,11 +396,12 @@ def delete_data_connector(cls, ) @classmethod - def get_metrics(cls, - data_connector_id: str, - project_id: str, - **kwargs: Any, - ) -> Metric: + def get_metrics( + cls, + data_connector_id: str, + project_id: str, + **kwargs: Any, + ) -> Metric: """ Get the metrics of the last 3 hours for a Data Connector. @@ -420,22 +431,25 @@ def get_metrics(cls, """ # Construct URL. - url = '/projects/{}/dataconnectors/{}' + url = "/projects/{}/dataconnectors/{}" url = url.format(project_id, data_connector_id) - url += ':metrics' + url += ":metrics" # Return Metric object of GET request response. - return Metric(dtrequests.DTRequest.get( - url=url, - **kwargs, - )) + return Metric( + dtrequests.DTRequest.get( + url=url, + **kwargs, + ) + ) @classmethod - def sync_data_connector(cls, - data_connector_id: str, - project_id: str, - **kwargs: Any, - ) -> None: + def sync_data_connector( + cls, + data_connector_id: str, + project_id: str, + **kwargs: Any, + ) -> None: """ Synchronizes the current Data Connector state. @@ -464,9 +478,9 @@ def sync_data_connector(cls, """ # Construct URL. - url = '/projects/{}/dataconnectors/{}' + url = "/projects/{}/dataconnectors/{}" url = url.format(project_id, data_connector_id) - url += ':sync' + url += ":sync" # Send POST request, but return nothing. dtrequests.DTRequest.post( @@ -477,27 +491,29 @@ def sync_data_connector(cls, @classmethod def _from_dict(cls, data_connector: dict) -> Optional[HttpPushConfig]: # Isolate the Data Connector type. 
- data_connector_type = data_connector['type'] + data_connector_type = data_connector["type"] # Select the appropriate config depending on type. - if data_connector_type == 'HTTP_PUSH': + if data_connector_type == "HTTP_PUSH": # Isolate config field. - config = data_connector['httpConfig'] + config = data_connector["httpConfig"] # Create and return an HttpPush object. return cls.HttpPushConfig( - url=config['url'], - signature_secret=config['signatureSecret'], - headers=config['headers'], + url=config["url"], + signature_secret=config["signatureSecret"], + headers=config["headers"], ) else: # If this else statement runs, no config is available for type. - dtlog.warning('No config available for {} Data Connectors.'.format( - data_connector_type - )) + dtlog.warning( + "No config available for {} Data Connectors.".format( + data_connector_type + ) + ) return None - class HttpPushConfig(): + class HttpPushConfig: """ Type-specific configurations for the HTTP_PUSH Data Connector. @@ -512,13 +528,14 @@ class HttpPushConfig(): """ - data_connector_type = 'HTTP_PUSH' + data_connector_type = "HTTP_PUSH" - def __init__(self, - url: Optional[str] = None, - signature_secret: Optional[str] = None, - headers: Optional[dict] = None, - ) -> None: + def __init__( + self, + url: Optional[str] = None, + signature_secret: Optional[str] = None, + headers: Optional[dict] = None, + ) -> None: """ Constructs the HttpPushConfig object. @@ -541,12 +558,12 @@ def __init__(self, def _to_dict(self) -> tuple[str, dict]: config: dict = dict() if self.url is not None: - config['url'] = self.url + config["url"] = self.url if self.signature_secret is not None: - config['signatureSecret'] = self.signature_secret + config["signatureSecret"] = self.signature_secret if self.headers is not None: - config['headers'] = self.headers - return 'httpConfig', config + config["headers"] = self.headers + return "httpConfig", config class Metric(dtoutputs.OutputBase): @@ -574,6 +591,6 @@ def __init__(self, metric: dict) -> None: dtoutputs.OutputBase.__init__(self, metric) # Unpack attributes from dictionary. - self.success_count = metric['metrics']['successCount'] - self.error_count = metric['metrics']['errorCount'] - self.latency = metric['metrics']['latency99p'] + self.success_count = metric["metrics"]["successCount"] + self.error_count = metric["metrics"]["errorCount"] + self.latency = metric["metrics"]["latency99p"] diff --git a/disruptive/resources/device.py b/disruptive/resources/device.py index 1544a17..73eeae5 100644 --- a/disruptive/resources/device.py +++ b/disruptive/resources/device.py @@ -42,22 +42,31 @@ class Device(dtoutputs.OutputBase): """ # Constants for the various device types. 
- TEMPERATURE: str = 'temperature' - PROXIMITY: str = 'proximity' - TOUCH: str = 'touch' - HUMIDITY: str = 'humidity' - PROXIMITY_COUNTER: str = 'proximityCounter' - TOUCH_COUNTER: str = 'touchCounter' - WATER_DETECTOR: str = 'waterDetector' - CLOUD_CONNECTOR: str = 'ccon' - CO2: str = 'co2' - MOTION: str = 'motion' - DESK_OCCUPANCY: str = 'deskOccupancy' - CONTACT: str = 'contact' + TEMPERATURE: str = "temperature" + PROXIMITY: str = "proximity" + TOUCH: str = "touch" + HUMIDITY: str = "humidity" + PROXIMITY_COUNTER: str = "proximityCounter" + TOUCH_COUNTER: str = "touchCounter" + WATER_DETECTOR: str = "waterDetector" + CLOUD_CONNECTOR: str = "ccon" + CO2: str = "co2" + MOTION: str = "motion" + DESK_OCCUPANCY: str = "deskOccupancy" + CONTACT: str = "contact" DEVICE_TYPES = [ - TEMPERATURE, PROXIMITY, TOUCH, HUMIDITY, - PROXIMITY_COUNTER, TOUCH_COUNTER, WATER_DETECTOR, - CLOUD_CONNECTOR, CO2, MOTION, DESK_OCCUPANCY, CONTACT, + TEMPERATURE, + PROXIMITY, + TOUCH, + HUMIDITY, + PROXIMITY_COUNTER, + TOUCH_COUNTER, + WATER_DETECTOR, + CLOUD_CONNECTOR, + CO2, + MOTION, + DESK_OCCUPANCY, + CONTACT, ] def __init__(self, device: dict) -> None: @@ -75,38 +84,39 @@ def __init__(self, device: dict) -> None: dtoutputs.OutputBase.__init__(self, device) # Unpack attributes from dictionary. - self.device_id: str = device['name'].split('/')[-1] - self.project_id: str = device['name'].split('/')[1] - self.device_type: str = device['type'] - self.labels: dict = device['labels'] + self.device_id: str = device["name"].split("/")[-1] + self.project_id: str = device["name"].split("/")[1] + self.device_type: str = device["type"] + self.labels: dict = device["labels"] # Set display_name if `name` label key exists. self.display_name: str | None = None - if 'name' in self.labels: - self.display_name = self.labels['name'] + if "name" in self.labels: + self.display_name = self.labels["name"] # Determine if the device is an emulator by checking id prefix. self.is_emulated: bool = False - if self.device_id.startswith('emu') and len(self.device_id) == 23: + if self.device_id.startswith("emu") and len(self.device_id) == 23: self.is_emulated = True # If it exists, set the product number. # This is not present for emulated devices. - self.product_number: str = '' - if 'productNumber' in device: - self.product_number = device['productNumber'] + self.product_number: str = "" + if "productNumber" in device: + self.product_number = device["productNumber"] # If it exists, set the reported object. self.reported: Reported | None = None - if 'reported' in device: - self.reported = Reported(device['reported']) + if "reported" in device: + self.reported = Reported(device["reported"]) @classmethod - def get_device(cls, - device_id: str, - project_id: Optional[str] = None, - **kwargs: Any, - ) -> Device: + def get_device( + cls, + device_id: str, + project_id: Optional[str] = None, + **kwargs: Any, + ) -> Device: """ Gets the current state of a single device. @@ -136,26 +146,27 @@ def get_device(cls, # If project_id is not given, use wildcard "-". if project_id is None: - project_id = '-' + project_id = "-" # Construct URL - url = '/projects/{}/devices/{}'.format(project_id, device_id) + url = "/projects/{}/devices/{}".format(project_id, device_id) # Return Device object of GET request response. 
return cls(dtrequests.DTRequest.get(url, **kwargs)) @classmethod - def list_devices(cls, - project_id: str, - query: Optional[str] = None, - device_ids: Optional[list[str]] = None, - device_types: Optional[list[str]] = None, - label_filters: Optional[dict[str, str]] = None, - order_by: Optional[str] = None, - organization_id: Optional[str] = None, - project_ids: Optional[list[str]] = None, - **kwargs: Any, - ) -> list[Device]: + def list_devices( + cls, + project_id: str, + query: Optional[str] = None, + device_ids: Optional[list[str]] = None, + device_types: Optional[list[str]] = None, + label_filters: Optional[dict[str, str]] = None, + order_by: Optional[str] = None, + organization_id: Optional[str] = None, + project_ids: Optional[list[str]] = None, + **kwargs: Any, + ) -> list[Device]: """ Gets a list of devices from either a project or projects in an organization. @@ -234,62 +245,63 @@ def list_devices(cls, """ # Enforce organization_id if project_id is wildcard. - if project_id == '-' and organization_id is None: + if project_id == "-" and organization_id is None: raise ValueError( - 'Parameter `organization_id` is required when ' + "Parameter `organization_id` is required when " '`project_id` is wildcard `"-"`.' ) # Warn about unsupported combination of parameters. - if project_id != '-' and organization_id is not None: + if project_id != "-" and organization_id is not None: warnings.warn( - 'Parameter `organization_id` is ignored when ' + "Parameter `organization_id` is ignored when " '`project_id` is not wildcard "-".', UserWarning, ) - if project_id != '-' and project_ids is not None: + if project_id != "-" and project_ids is not None: warnings.warn( - 'Parameter `project_ids` is ignored when ' + "Parameter `project_ids` is ignored when " '`project_id` is not wildcard "-".', UserWarning, ) params: dict = dict() if query is not None: - params['query'] = query + params["query"] = query if device_ids is not None: - params['device_ids'] = device_ids + params["device_ids"] = device_ids if device_types is not None: - params['device_types'] = device_types + params["device_types"] = device_types if order_by is not None: - params['order_by'] = order_by + params["order_by"] = order_by if organization_id is not None: - params['organization'] = 'organizations/' + organization_id + params["organization"] = "organizations/" + organization_id if project_ids is not None: - params['projects'] = ['projects/' + xid for xid in project_ids] + params["projects"] = ["projects/" + xid for xid in project_ids] # Convert label_filters dictionary to list of strings. if label_filters is not None: labels_list = [] for key in label_filters: - labels_list.append(key + '=' + label_filters[key]) - params['label_filters'] = labels_list + labels_list.append(key + "=" + label_filters[key]) + params["label_filters"] = labels_list # Return list of Device objects of paginated GET response. devices = dtrequests.DTRequest.paginated_get( - url=f'/projects/{project_id}/devices', - pagination_key='devices', + url=f"/projects/{project_id}/devices", + pagination_key="devices", params=params, **kwargs, ) return [cls(device) for device in devices] @staticmethod - def transfer_devices(device_ids: list[str], - source_project_id: str, - target_project_id: str, - **kwargs: Any, - ) -> list[TransferDeviceError]: + def transfer_devices( + device_ids: list[str], + source_project_id: str, + target_project_id: str, + **kwargs: Any, + ) -> list[TransferDeviceError]: """ Transfers all specified devices to the target project. 
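
(Not part of the patch.) A minimal sketch of the device lookups reformatted above, assuming `dt.Device` is exported at the package top level and credentials are already configured; IDs and labels are placeholders:

```python
# Illustrative only; not part of this diff. IDs and labels are placeholders
# and dt.Device as a top-level export is an assumption.
import disruptive as dt

# Fetch one device; omitting project_id falls back to the "-" wildcard.
device = dt.Device.get_device(device_id="<device-id>")
print(device.device_type, device.display_name)

# List temperature sensors in a project, filtered on a label.
sensors = dt.Device.list_devices(
    project_id="<project-id>",
    device_types=[dt.Device.TEMPERATURE],
    label_filters={"room": "<room-name>"},
)
for sensor in sensors:
    print(sensor.device_id, sensor.labels)
```
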
@@ -329,33 +341,30 @@ def transfer_devices(device_ids: list[str], """ # Construct list of devices. - name = 'projects/{}/devices/{}' + name = "projects/{}/devices/{}" devices = [name.format(source_project_id, xid) for xid in device_ids] # Construct request body dictionary. - body = { - "devices": devices - } + body = {"devices": devices} # Sent POST request. response = dtrequests.DTRequest.post( - url='/projects/{}/devices:transfer'.format( - target_project_id - ), + url="/projects/{}/devices:transfer".format(target_project_id), body=body, **kwargs, ) # Return any transferErrors found in response. - return [TransferDeviceError(err) for err in response['transferErrors']] + return [TransferDeviceError(err) for err in response["transferErrors"]] @staticmethod - def set_label(device_id: str, - project_id: str, - key: str, - value: str, - **kwargs: Any, - ) -> list[LabelUpdateError]: + def set_label( + device_id: str, + project_id: str, + key: str, + value: str, + **kwargs: Any, + ) -> list[LabelUpdateError]: """ Set a label key and value for a single device. @@ -394,25 +403,26 @@ def set_label(device_id: str, """ # Construct URL. - url = '/projects/{}/devices:batchUpdate'.format(project_id) + url = "/projects/{}/devices:batchUpdate".format(project_id) # Construct request body dictionary. body: dict = dict() - body['devices'] = ['projects/' + project_id + '/devices/' + device_id] - body['addLabels'] = {key: value} + body["devices"] = ["projects/" + project_id + "/devices/" + device_id] + body["addLabels"] = {key: value} # Sent POST request. response = dtrequests.DTRequest.post(url, body=body, **kwargs) # Return any batchErrors found in response. - return [LabelUpdateError(err) for err in response['batchErrors']] + return [LabelUpdateError(err) for err in response["batchErrors"]] @staticmethod - def remove_label(device_id: str, - project_id: str, - key: str, - **kwargs: Any, - ) -> list[LabelUpdateError]: + def remove_label( + device_id: str, + project_id: str, + key: str, + **kwargs: Any, + ) -> list[LabelUpdateError]: """ Remove a label (key and value) from a single device. @@ -446,26 +456,27 @@ def remove_label(device_id: str, """ # Construct URL. - url = '/projects/{}/devices:batchUpdate'.format(project_id) + url = "/projects/{}/devices:batchUpdate".format(project_id) # Construct request body dictionary. body: dict = dict() - body['devices'] = ['projects/' + project_id + '/devices/' + device_id] - body['removeLabels'] = [key] + body["devices"] = ["projects/" + project_id + "/devices/" + device_id] + body["removeLabels"] = [key] # Sent POST request. response = dtrequests.DTRequest.post(url, body=body, **kwargs) # Return any batchErrors found in response. - return [LabelUpdateError(err) for err in response['batchErrors']] + return [LabelUpdateError(err) for err in response["batchErrors"]] @staticmethod - def batch_update_labels(device_ids: list[str], - project_id: str, - set_labels: Optional[dict[str, str]] = None, - remove_labels: Optional[list[str]] = None, - **kwargs: Any, - ) -> list[LabelUpdateError]: + def batch_update_labels( + device_ids: list[str], + project_id: str, + set_labels: Optional[dict[str, str]] = None, + remove_labels: Optional[list[str]] = None, + **kwargs: Any, + ) -> list[LabelUpdateError]: """ Add, update, or remove multiple labels (key and value) on multiple devices @@ -523,25 +534,25 @@ def batch_update_labels(device_ids: list[str], """ # Construct list of devices. 
- name = 'projects/{}/devices/{}' + name = "projects/{}/devices/{}" devices = [name.format(project_id, xid) for xid in device_ids] # Construct request body dictionary. body: dict = dict() - body['devices'] = devices + body["devices"] = devices if set_labels is not None: - body['addLabels'] = set_labels + body["addLabels"] = set_labels if remove_labels is not None: - body['removeLabels'] = remove_labels + body["removeLabels"] = remove_labels # Construct URL. - url = '/projects/{}/devices:batchUpdate'.format(project_id) + url = "/projects/{}/devices:batchUpdate".format(project_id) # Sent POST request. response = dtrequests.DTRequest.post(url, body=body, **kwargs) # Return any batchErrors found in response. - return [LabelUpdateError(err) for err in response['batchErrors']] + return [LabelUpdateError(err) for err in response["batchErrors"]] class Reported(dtoutputs.OutputBase): @@ -657,4 +668,4 @@ def __unpack(self) -> None: data, ) else: - dtlog.warning('Skipping unknown reported type {}.'.format(key)) + dtlog.warning("Skipping unknown reported type {}.".format(key)) diff --git a/disruptive/resources/emulator.py b/disruptive/resources/emulator.py index 3521a06..caf02e0 100644 --- a/disruptive/resources/emulator.py +++ b/disruptive/resources/emulator.py @@ -7,7 +7,7 @@ from disruptive.resources.device import Device -class Emulator(): +class Emulator: """ Contains staticmethods for the emulator resource. Used for namespacing only and thus does not have a constructor @@ -15,12 +15,13 @@ class Emulator(): """ @staticmethod - def create_device(project_id: str, - device_type: str, - display_name: Optional[str] = None, - labels: dict[str, str] = {}, - **kwargs: Any, - ) -> Device: + def create_device( + project_id: str, + device_type: str, + display_name: Optional[str] = None, + labels: dict[str, str] = {}, + **kwargs: Any, + ) -> Device: """ Create a new emulated device with specified type and project. @@ -61,30 +62,33 @@ def create_device(project_id: str, """ # Construct URL - url = '/projects/{}/devices'.format(project_id) + url = "/projects/{}/devices".format(project_id) # Construct body dictionary. body: dict = dict() - body['type'] = device_type - body['labels'] = labels + body["type"] = device_type + body["labels"] = labels # Add display_name to labels dictionary in body. if display_name is not None: - body['labels']['name'] = display_name + body["labels"]["name"] = display_name # Return Device object of GET request response. - return Device(dtrequests.DTRequest.post( - url=url, - base_url=disruptive.emulator_base_url, - body=body, - **kwargs, - )) + return Device( + dtrequests.DTRequest.post( + url=url, + base_url=disruptive.emulator_base_url, + body=body, + **kwargs, + ) + ) @staticmethod - def delete_device(device_id: str, - project_id: str, - **kwargs: Any, - ) -> None: + def delete_device( + device_id: str, + project_id: str, + **kwargs: Any, + ) -> None: """ Deletes the specified emulated device. @@ -106,7 +110,7 @@ def delete_device(device_id: str, """ # Construct URL - url = '/projects/{}/devices/{}'.format(project_id, device_id) + url = "/projects/{}/devices/{}".format(project_id, device_id) # Send DELETE request, but return nothing. 
dtrequests.DTRequest.delete( @@ -116,28 +120,29 @@ def delete_device(device_id: str, ) @staticmethod - def publish_event(device_id: str, - project_id: str, - data: disruptive.events.Touch | - disruptive.events.Temperature | - disruptive.events.ObjectPresent | - disruptive.events.Humidity | - disruptive.events.ObjectPresentCount | - disruptive.events.TouchCount | - disruptive.events.WaterPresent | - disruptive.events.NetworkStatus | - disruptive.events.BatteryStatus | - disruptive.events.ConnectionStatus | - disruptive.events.EthernetStatus | - disruptive.events.CellularStatus | - disruptive.events.Co2 | - disruptive.events.Pressure | - disruptive.events.Motion | - disruptive.events.DeskOccupancy | - disruptive.events.Contact | - disruptive.events.ProbeWireStatus, - **kwargs: Any, - ) -> None: + def publish_event( + device_id: str, + project_id: str, + data: disruptive.events.Touch + | disruptive.events.Temperature + | disruptive.events.ObjectPresent + | disruptive.events.Humidity + | disruptive.events.ObjectPresentCount + | disruptive.events.TouchCount + | disruptive.events.WaterPresent + | disruptive.events.NetworkStatus + | disruptive.events.BatteryStatus + | disruptive.events.ConnectionStatus + | disruptive.events.EthernetStatus + | disruptive.events.CellularStatus + | disruptive.events.Co2 + | disruptive.events.Pressure + | disruptive.events.Motion + | disruptive.events.DeskOccupancy + | disruptive.events.Contact + | disruptive.events.ProbeWireStatus, + **kwargs: Any, + ) -> None: """ From the specified device, publish an event of the given type. @@ -172,7 +177,7 @@ def publish_event(device_id: str, """ # Construct URL - url = '/projects/{}/devices/{}:publish'.format(project_id, device_id) + url = "/projects/{}/devices/{}:publish".format(project_id, device_id) # Send POST request, but return nothing. dtrequests.DTRequest.post( diff --git a/disruptive/resources/eventhistory.py b/disruptive/resources/eventhistory.py index 9e91d2c..ad87183 100644 --- a/disruptive/resources/eventhistory.py +++ b/disruptive/resources/eventhistory.py @@ -17,13 +17,14 @@ class EventHistory(list): """ @staticmethod - def list_events(device_id: str, - project_id: str, - event_types: Optional[list[str]] = None, - start_time: Optional[str | datetime] = None, - end_time: Optional[str | datetime] = None, - **kwargs: Any, - ) -> EventHistory: + def list_events( + device_id: str, + project_id: str, + event_types: Optional[list[str]] = None, + start_time: Optional[str | datetime] = None, + end_time: Optional[str | datetime] = None, + **kwargs: Any, + ) -> EventHistory: """ Get the event history for a single device. @@ -74,25 +75,25 @@ def list_events(device_id: str, """ # Construct URL. - url = '/projects/{}/devices/{}/events'.format(project_id, device_id) + url = "/projects/{}/devices/{}/events".format(project_id, device_id) # Construct parameters dictionary. params: dict = dict() if event_types is not None: - params['eventTypes'] = event_types + params["eventTypes"] = event_types # Sanitize timestamps as they must be iso8601 format. start_time_iso8601 = dttrans.to_iso8601(start_time) if start_time_iso8601 is not None: - params['startTime'] = start_time_iso8601 + params["startTime"] = start_time_iso8601 end_time_iso8601 = dttrans.to_iso8601(end_time) if end_time_iso8601 is not None: - params['endTime'] = end_time_iso8601 + params["endTime"] = end_time_iso8601 # Send paginated GET request. 
res = dtrequests.DTRequest.paginated_get( url=url, - pagination_key='events', + pagination_key="events", params=params, **kwargs, ) @@ -118,16 +119,18 @@ def _to_dataframe_format(self) -> list[dict]: rows = [] for event in self: base = { - 'device_id': event.device_id, - 'event_id': event.event_id, - 'event_type': event.event_type, + "device_id": event.device_id, + "event_id": event.event_id, + "event_type": event.event_type, } if event.event_type == disruptive.events.TEMPERATURE: - rows += [{ + rows += [ + { **base, **sample.raw, - } for sample in event.data.samples + } + for sample in event.data.samples ] else: rows.append({**base, **event.data.raw}) @@ -155,16 +158,18 @@ def to_pandas(self) -> Any: import pandas # type: ignore except ModuleNotFoundError: raise ModuleNotFoundError( - 'Missing package `pandas`.\n\n' - 'to_dataframe() requires additional third-party packages.\n' - '>> pip install disruptive[extra]' + "Missing package `pandas`.\n\n" + "to_dataframe() requires additional third-party packages.\n" + ">> pip install disruptive[extra]" ) rows = self._to_dataframe_format() df = pandas.json_normalize( - rows, None, ['device_id', 'event_id', 'event_type'], - errors='ignore', + rows, + None, + ["device_id", "event_id", "event_type"], + errors="ignore", ) # Convert columns headers from camelCase to snake_case for consistency. @@ -194,9 +199,9 @@ def to_polars(self) -> Any: import polars as pl # type: ignore except ModuleNotFoundError: raise ModuleNotFoundError( - 'Missing package `pandas`.\n\n' - 'to_dataframe() requires additional third-party packages.\n' - '>> pip install disruptive[extra]' + "Missing package `pandas`.\n\n" + "to_dataframe() requires additional third-party packages.\n" + ">> pip install disruptive[extra]" ) rows = self._to_dataframe_format() @@ -208,10 +213,14 @@ def to_polars(self) -> Any: df = df.rename(mapping=map) # Convert timestamp columns to datetime type. - if 'update_time' in df.columns \ - and df['update_time'].dtype == pl.String: - df = df.with_columns([ - pl.col('update_time').str.to_datetime(), - ]) + if ( + "update_time" in df.columns + and df["update_time"].dtype == pl.String + ): + df = df.with_columns( + [ + pl.col("update_time").str.to_datetime(), + ] + ) return df diff --git a/disruptive/resources/organization.py b/disruptive/resources/organization.py index 23685d6..6bcdfad 100644 --- a/disruptive/resources/organization.py +++ b/disruptive/resources/organization.py @@ -39,14 +39,15 @@ def __init__(self, organization: dict) -> None: OutputBase.__init__(self, organization) # Unpack attributes from dictionary. - self.organization_id: str = organization['name'].split('/')[-1] - self.display_name: str = organization['displayName'] + self.organization_id: str = organization["name"].split("/")[-1] + self.display_name: str = organization["displayName"] @classmethod - def get_organization(cls, - organization_id: str, - **kwargs: Any, - ) -> Organization: + def get_organization( + cls, + organization_id: str, + **kwargs: Any, + ) -> Organization: """ Get a single organization. @@ -71,13 +72,15 @@ def get_organization(cls, """ # Construct URL - url = '/organizations/{}'.format(organization_id) + url = "/organizations/{}".format(organization_id) # Return Organization object of GET request response. 
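
The event-history methods above are typically combined with the DataFrame helpers; a sketch, assuming `datetime` objects are accepted for `start_time` (they are converted to ISO 8601 by the transforms module) and that pandas is available via the `extra` requirements.

```
from datetime import datetime, timedelta, timezone

import disruptive as dt

project_id = "<project-id>"
device_id = "<device-id>"

# Fetch the last 24 hours of temperature events for a single device.
history = dt.EventHistory.list_events(
    device_id=device_id,
    project_id=project_id,
    event_types=[dt.events.TEMPERATURE],
    start_time=datetime.now(timezone.utc) - timedelta(hours=24),
)

# Convert to a pandas DataFrame; requires `pip install disruptive[extra]`.
df = history.to_pandas()
print(df.head())
```
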
- return cls(dtrequests.DTRequest.get( - url=url, - **kwargs, - )) + return cls( + dtrequests.DTRequest.get( + url=url, + **kwargs, + ) + ) @classmethod def list_organizations(cls, **kwargs: Any) -> list[Organization]: @@ -104,16 +107,17 @@ def list_organizations(cls, **kwargs: Any) -> list[Organization]: # Return list of Organization objects of paginated GET response. orgs = dtrequests.DTRequest.paginated_get( - url='/organizations', - pagination_key='organizations', + url="/organizations", + pagination_key="organizations", **kwargs, ) return [cls(org) for org in orgs] @staticmethod - def list_members(organization_id: str, - **kwargs: Any, - ) -> list[Member]: + def list_members( + organization_id: str, + **kwargs: Any, + ) -> list[Member]: """ Gets a list of all members in an organization. @@ -138,22 +142,23 @@ def list_members(organization_id: str, """ # Construct URL - url = '/organizations/{}/members'.format(organization_id) + url = "/organizations/{}/members".format(organization_id) # Return list of Member objects of paginated GET response. members = dtrequests.DTRequest.paginated_get( url=url, - pagination_key='members', + pagination_key="members", **kwargs, ) return [Member(m) for m in members] @staticmethod - def add_member(organization_id: str, - email: str, - roles: list[str], - **kwargs: Any, - ) -> Member: + def add_member( + organization_id: str, + email: str, + roles: list[str], + **kwargs: Any, + ) -> Member: """ Add a new member to the specified organization. @@ -187,25 +192,28 @@ def add_member(organization_id: str, """ # Construct URL - url = '/organizations/{}/members'.format(organization_id) + url = "/organizations/{}/members".format(organization_id) # Construct request body. body: dict = dict() - body['roles'] = ['roles/' + r for r in roles] - body['email'] = email + body["roles"] = ["roles/" + r for r in roles] + body["email"] = email # Return Member object of POST request response. - return Member(dtrequests.DTRequest.post( - url=url, - body=body, - **kwargs, - )) + return Member( + dtrequests.DTRequest.post( + url=url, + body=body, + **kwargs, + ) + ) @staticmethod - def get_member(member_id: str, - organization_id: str, - **kwargs: Any, - ) -> Member: + def get_member( + member_id: str, + organization_id: str, + **kwargs: Any, + ) -> Member: """ Get a member from the specified organization. @@ -237,22 +245,25 @@ def get_member(member_id: str, """ # Construct URL - url = '/organizations/{}/members/{}'.format( + url = "/organizations/{}/members/{}".format( organization_id, member_id, ) # Return Member object of GET request response. - return Member(dtrequests.DTRequest.get( - url=url, - **kwargs, - )) + return Member( + dtrequests.DTRequest.get( + url=url, + **kwargs, + ) + ) @staticmethod - def remove_member(member_id: str, - organization_id: str, - **kwargs: Any, - ) -> None: + def remove_member( + member_id: str, + organization_id: str, + **kwargs: Any, + ) -> None: """ Revoke a member's membership in the specified organization. This does not delete the underlying Service Account or User. 
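
A sketch of the organization and membership calls touched above. The organization ID and email are placeholders; the `roles/` prefix is added by the library, so only the short role names are passed.

```
import disruptive as dt

# List every organization available to the authenticated account.
for org in dt.Organization.list_organizations():
    print(org.organization_id, org.display_name)

organization_id = "<organization-id>"  # placeholder

# Invite a new member using one of the short role names from Role.ROLES.
member = dt.Organization.add_member(
    organization_id=organization_id,
    email="new.member@example.com",
    roles=[dt.Role.ORGANIZATION_ADMIN],
)

# Current members can be listed the same way.
for m in dt.Organization.list_members(organization_id):
    print(m)
```
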
@@ -280,7 +291,7 @@ def remove_member(member_id: str, """ # Construct URL - url = '/organizations/{}/members/{}'.format( + url = "/organizations/{}/members/{}".format( organization_id, member_id, ) @@ -292,10 +303,11 @@ def remove_member(member_id: str, ) @staticmethod - def get_member_invite_url(member_id: str, - organization_id: str, - **kwargs: Any, - ) -> str: + def get_member_invite_url( + member_id: str, + organization_id: str, + **kwargs: Any, + ) -> str: """ Get the invite URL for a member with pending invite. @@ -330,22 +342,26 @@ def get_member_invite_url(member_id: str, """ # Construct URL - url = '/organizations/{}/members/{}'.format( - organization_id, - member_id, - ) + ':getInviteUrl' + url = ( + "/organizations/{}/members/{}".format( + organization_id, + member_id, + ) + + ":getInviteUrl" + ) # Return url string in GET response. invite_url: str = dtrequests.DTRequest.get( url=url, **kwargs, - )['inviteUrl'] + )["inviteUrl"] return invite_url @staticmethod - def list_permissions(organization_id: str, - **kwargs: Any, - ) -> list[str]: + def list_permissions( + organization_id: str, + **kwargs: Any, + ) -> list[str]: """ List permissions available in the specified organization. @@ -372,12 +388,12 @@ def list_permissions(organization_id: str, """ # Construct URL - url = '/organizations/{}/permissions'.format(organization_id) + url = "/organizations/{}/permissions".format(organization_id) # Return list of permissions in GET response. permissions: list[str] = dtrequests.DTRequest.paginated_get( url=url, - pagination_key='permissions', + pagination_key="permissions", **kwargs, ) return permissions diff --git a/disruptive/resources/project.py b/disruptive/resources/project.py index 44f1222..e8d6477 100644 --- a/disruptive/resources/project.py +++ b/disruptive/resources/project.py @@ -51,21 +51,23 @@ def __init__(self, project: dict) -> None: OutputBase.__init__(self, project) # Unpack attributes from dictionary. - self.project_id: str = project['name'].split('/')[-1] + self.project_id: str = project["name"].split("/")[-1] self.id: str = self.project_id # Deprecated in favor of project_id. - self.display_name: str = project['displayName'] - self.organization_id: str = project['organization'].split('/')[-1] - self.organization_display_name: str = \ - project['organizationDisplayName'] - self.sensor_count: int = project['sensorCount'] - self.cloud_connector_count: int = project['cloudConnectorCount'] - self.is_inventory: bool = project['inventory'] + self.display_name: str = project["displayName"] + self.organization_id: str = project["organization"].split("/")[-1] + self.organization_display_name: str = project[ + "organizationDisplayName" + ] + self.sensor_count: int = project["sensorCount"] + self.cloud_connector_count: int = project["cloudConnectorCount"] + self.is_inventory: bool = project["inventory"] @classmethod - def get_project(cls, - project_id: str, - **kwargs: Any, - ) -> Project: + def get_project( + cls, + project_id: str, + **kwargs: Any, + ) -> Project: """ Fetch a single project. @@ -90,20 +92,23 @@ def get_project(cls, """ # Construct URL. - url = '/projects/{}'.format(project_id) + url = "/projects/{}".format(project_id) # Return Project object of GET request response. 
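
The invite-URL and permission endpoints above are read-only; a short sketch with placeholder IDs.

```
import disruptive as dt

organization_id = "<organization-id>"
member_id = "<member-id>"

# Fetch the invite URL for a member whose invitation is still pending.
invite_url = dt.Organization.get_member_invite_url(
    member_id=member_id,
    organization_id=organization_id,
)
print(invite_url)

# List the permissions the caller holds in the organization.
for permission in dt.Organization.list_permissions(organization_id):
    print(permission)
```
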
- return cls(dtrequests.DTRequest.get( - url=url, - **kwargs, - )) + return cls( + dtrequests.DTRequest.get( + url=url, + **kwargs, + ) + ) @classmethod - def list_projects(cls, - organization_id: Optional[str] = None, - query: Optional[str] = None, - **kwargs: Any, - ) -> list[Project]: + def list_projects( + cls, + organization_id: Optional[str] = None, + query: Optional[str] = None, + **kwargs: Any, + ) -> list[Project]: """ Fetch a list of all available projects. @@ -130,30 +135,31 @@ def list_projects(cls, """ # Construct URL. - url = '/projects' + url = "/projects" # Construct parameters dictionary. params = {} if organization_id is not None: - params['organization'] = 'organizations/' + organization_id + params["organization"] = "organizations/" + organization_id if query is not None: - params['query'] = query + params["query"] = query # Return list of Project objects of paginated GET response. responses = dtrequests.DTRequest.paginated_get( url=url, - pagination_key='projects', + pagination_key="projects", params=params, **kwargs, ) return [cls(r) for r in responses] @classmethod - def create_project(cls, - organization_id: str, - display_name: str, - **kwargs: Any, - ) -> Project: + def create_project( + cls, + organization_id: str, + display_name: str, + **kwargs: Any, + ) -> Project: """ Create a new project in the specified organization. @@ -183,25 +189,28 @@ def create_project(cls, """ # Construct URL. - url = '/projects' + url = "/projects" # Construct request body. body: dict = dict() - body['organization'] = 'organizations/' + organization_id - body['displayName'] = display_name + body["organization"] = "organizations/" + organization_id + body["displayName"] = display_name # Return Project object of POST request response. - return cls(dtrequests.DTRequest.post( - url=url, - body=body, - **kwargs, - )) + return cls( + dtrequests.DTRequest.post( + url=url, + body=body, + **kwargs, + ) + ) @staticmethod - def update_project(project_id: str, - display_name: Optional[str] = None, - **kwargs: Any, - ) -> None: + def update_project( + project_id: str, + display_name: Optional[str] = None, + **kwargs: Any, + ) -> None: """ Updates the display name a specified project. @@ -226,12 +235,12 @@ def update_project(project_id: str, """ # Construct URL. - url = '/projects/' + project_id + url = "/projects/" + project_id # Construct request body. body = {} if display_name is not None: - body['displayName'] = display_name + body["displayName"] = display_name # Send PATCH request, but return nothing. dtrequests.DTRequest.patch( @@ -241,9 +250,10 @@ def update_project(project_id: str, ) @staticmethod - def delete_project(project_id: str, - **kwargs: Any, - ) -> None: + def delete_project( + project_id: str, + **kwargs: Any, + ) -> None: """ Deletes the specified project. @@ -271,7 +281,7 @@ def delete_project(project_id: str, """ # Construct URL. - url = '/projects/' + project_id + url = "/projects/" + project_id # Send DELETE request, but return nothing. dtrequests.DTRequest.delete( @@ -280,9 +290,10 @@ def delete_project(project_id: str, ) @staticmethod - def list_members(project_id: str, - **kwargs: Any, - ) -> list[Member]: + def list_members( + project_id: str, + **kwargs: Any, + ) -> list[Member]: """ Gets a list of all members in a project. @@ -307,23 +318,24 @@ def list_members(project_id: str, """ # Construct URL - url = '/projects/{}/members'.format(project_id) + url = "/projects/{}/members".format(project_id) # Return list of Member objects of paginated GET response. 
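
A sketch of the project lifecycle methods reformatted above (create, rename, list), with a placeholder organization ID.

```
import disruptive as dt

organization_id = "<organization-id>"

# Create a project, then rename it.
project = dt.Project.create_project(
    organization_id=organization_id,
    display_name="my-new-project",
)
dt.Project.update_project(
    project_id=project.project_id,
    display_name="my-renamed-project",
)

# List all projects in the organization, including the new one.
for p in dt.Project.list_projects(organization_id=organization_id):
    print(p.project_id, p.display_name, p.sensor_count)
```
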
members = dtrequests.DTRequest.paginated_get( url=url, - pagination_key='members', + pagination_key="members", **kwargs, ) members_list: list[Member] = [Member(m) for m in members] return members_list @staticmethod - def add_member(project_id: str, - email: str, - roles: list[str], - **kwargs: Any, - ) -> Member: + def add_member( + project_id: str, + email: str, + roles: list[str], + **kwargs: Any, + ) -> Member: """ Add a new member to the specified project. @@ -357,25 +369,28 @@ def add_member(project_id: str, """ # Construct URL - url = '/projects/{}/members'.format(project_id) + url = "/projects/{}/members".format(project_id) # Construct request body. body: dict = dict() - body['roles'] = ['roles/' + r for r in roles] - body['email'] = email + body["roles"] = ["roles/" + r for r in roles] + body["email"] = email # Return Member object of POST request response. - return Member(dtrequests.DTRequest.post( - url=url, - body=body, - **kwargs, - )) + return Member( + dtrequests.DTRequest.post( + url=url, + body=body, + **kwargs, + ) + ) @staticmethod - def get_member(member_id: str, - project_id: str, - **kwargs: Any, - ) -> Member: + def get_member( + member_id: str, + project_id: str, + **kwargs: Any, + ) -> Member: """ Get a single member in the specified project. @@ -407,23 +422,26 @@ def get_member(member_id: str, """ # Construct URL - url = '/projects/{}/members/{}'.format( + url = "/projects/{}/members/{}".format( project_id, member_id, ) # Return Member object of GET request response. - return Member(dtrequests.DTRequest.get( - url=url, - **kwargs, - )) + return Member( + dtrequests.DTRequest.get( + url=url, + **kwargs, + ) + ) @staticmethod - def update_member(member_id: str, - project_id: str, - roles: list[str], - **kwargs: Any, - ) -> Member: + def update_member( + member_id: str, + project_id: str, + roles: list[str], + **kwargs: Any, + ) -> Member: """ Update the role(s) of the specified member. @@ -458,7 +476,7 @@ def update_member(member_id: str, """ # Construct URL - url = '/projects/{}/members/{}'.format( + url = "/projects/{}/members/{}".format( project_id, member_id, ) @@ -466,20 +484,23 @@ def update_member(member_id: str, # Construct request body. body: dict = dict() if roles is not None: - body['roles'] = ['roles/' + r for r in roles] + body["roles"] = ["roles/" + r for r in roles] # Return updated Member object of PATCH request response. - return Member(dtrequests.DTRequest.patch( - url=url, - body=body, - **kwargs, - )) + return Member( + dtrequests.DTRequest.patch( + url=url, + body=body, + **kwargs, + ) + ) @staticmethod - def remove_member(member_id: str, - project_id: str, - **kwargs: Any, - ) -> None: + def remove_member( + member_id: str, + project_id: str, + **kwargs: Any, + ) -> None: """ Revoke a member's membership in the specified project. This does not delete the underlying Service Account or User. @@ -507,7 +528,7 @@ def remove_member(member_id: str, """ # Construct URL - url = '/projects/{}/members/{}'.format( + url = "/projects/{}/members/{}".format( project_id, member_id, ) @@ -519,10 +540,11 @@ def remove_member(member_id: str, ) @staticmethod - def get_member_invite_url(member_id: str, - project_id: str, - **kwargs: Any, - ) -> str: + def get_member_invite_url( + member_id: str, + project_id: str, + **kwargs: Any, + ) -> str: """ Get the invite URL for a member with pending invite. 
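
A sketch of project membership management, assuming the returned `Member` exposes a `member_id` attribute; the project ID and email are placeholders.

```
import disruptive as dt

project_id = "<project-id>"

# Add a member as a developer, then promote them to project admin.
member = dt.Project.add_member(
    project_id=project_id,
    email="developer@example.com",
    roles=[dt.Role.PROJECT_DEVELOPER],
)

dt.Project.update_member(
    member_id=member.member_id,  # attribute assumed
    project_id=project_id,
    roles=[dt.Role.PROJECT_ADMIN],
)
```
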
@@ -562,22 +584,26 @@ def get_member_invite_url(member_id: str, """ # Construct URL - url = '/projects/{}/members/{}'.format( - project_id, - member_id, - ) + ':getInviteUrl' + url = ( + "/projects/{}/members/{}".format( + project_id, + member_id, + ) + + ":getInviteUrl" + ) # Return url string in GET response. invite_url: str = dtrequests.DTRequest.get( url=url, **kwargs, - )['inviteUrl'] + )["inviteUrl"] return invite_url @staticmethod - def list_permissions(project_id: str, - **kwargs: Any, - ) -> list[str]: + def list_permissions( + project_id: str, + **kwargs: Any, + ) -> list[str]: """ List permissions available to the caller in the specified project. @@ -602,12 +628,12 @@ def list_permissions(project_id: str, """ # Construct URL - url = '/projects/{}/permissions'.format(project_id) + url = "/projects/{}/permissions".format(project_id) # Return list of permissions in GET response. permissions: list[str] = dtrequests.DTRequest.paginated_get( url=url, - pagination_key='permissions', + pagination_key="permissions", **kwargs, ) return permissions diff --git a/disruptive/resources/role.py b/disruptive/resources/role.py index 417f907..4cb6613 100644 --- a/disruptive/resources/role.py +++ b/disruptive/resources/role.py @@ -29,12 +29,16 @@ class Role(OutputBase): """ # Constants for the available roles. - PROJECT_USER = 'project.user' - PROJECT_DEVELOPER = 'project.developer' - PROJECT_ADMIN = 'project.admin' - ORGANIZATION_ADMIN = 'organization.admin' - ROLES = [PROJECT_USER, PROJECT_DEVELOPER, PROJECT_ADMIN, - ORGANIZATION_ADMIN] + PROJECT_USER = "project.user" + PROJECT_DEVELOPER = "project.developer" + PROJECT_ADMIN = "project.admin" + ORGANIZATION_ADMIN = "organization.admin" + ROLES = [ + PROJECT_USER, + PROJECT_DEVELOPER, + PROJECT_ADMIN, + ORGANIZATION_ADMIN, + ] def __init__(self, role: dict) -> None: """ @@ -51,16 +55,17 @@ def __init__(self, role: dict) -> None: OutputBase.__init__(self, role) # Unpack attributes from dictionary. - self.role: str = role['name'].split('/')[-1] - self.display_name: str = role['displayName'] - self.description: str = role['description'] - self.permissions: list[str] = role['permissions'] + self.role: str = role["name"].split("/")[-1] + self.display_name: str = role["displayName"] + self.description: str = role["description"] + self.permissions: list[str] = role["permissions"] @classmethod - def get_role(cls, - role: str, - **kwargs: Any, - ) -> Role: + def get_role( + cls, + role: str, + **kwargs: Any, + ) -> Role: """ Gets a role specified by its name. @@ -85,10 +90,12 @@ def get_role(cls, """ # Return list of Role objects. - return cls(dtrequests.DTRequest.get( - url='/roles/' + role, - **kwargs, - )) + return cls( + dtrequests.DTRequest.get( + url="/roles/" + role, + **kwargs, + ) + ) @classmethod def list_roles(cls, **kwargs: Any) -> list[Role]: @@ -115,8 +122,8 @@ def list_roles(cls, **kwargs: Any) -> list[Role]: # Return list of Role objects. response = dtrequests.DTRequest.paginated_get( - url='/roles', - pagination_key='roles', + url="/roles", + pagination_key="roles", **kwargs, ) return [cls(r) for r in response] diff --git a/disruptive/resources/service_account.py b/disruptive/resources/service_account.py index ea1ee52..f2c333e 100644 --- a/disruptive/resources/service_account.py +++ b/disruptive/resources/service_account.py @@ -49,21 +49,24 @@ def __init__(self, service_account: dict) -> None: dtoutputs.OutputBase.__init__(self, service_account) # Unpack attributes from dictionary. 
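
The role constants above map directly onto `get_role()` and `list_roles()`; a short sketch.

```
import disruptive as dt

# Fetch a single role by its short name using the class constants.
role = dt.Role.get_role(dt.Role.PROJECT_DEVELOPER)
print(role.display_name)
print(role.permissions)

# Or list every available role.
for r in dt.Role.list_roles():
    print(r.role, r.description)
```
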
- self.service_account_id: str = service_account['name'].split('/')[-1] - self.email: str = service_account['email'] - self.display_name: str = service_account['displayName'] - self.basic_auth_enabled: bool = service_account['enableBasicAuth'] - self.create_time: Optional[datetime] = \ - dttrans.to_datetime(service_account['createTime']) - self.update_time: Optional[datetime] = \ - dttrans.to_datetime(service_account['updateTime']) + self.service_account_id: str = service_account["name"].split("/")[-1] + self.email: str = service_account["email"] + self.display_name: str = service_account["displayName"] + self.basic_auth_enabled: bool = service_account["enableBasicAuth"] + self.create_time: Optional[datetime] = dttrans.to_datetime( + service_account["createTime"] + ) + self.update_time: Optional[datetime] = dttrans.to_datetime( + service_account["updateTime"] + ) @classmethod - def get_service_account(cls, - service_account_id: str, - project_id: str, - **kwargs: Any, - ) -> ServiceAccount: + def get_service_account( + cls, + service_account_id: str, + project_id: str, + **kwargs: Any, + ) -> ServiceAccount: """ Gets the current state of a single Service Account. @@ -93,22 +96,25 @@ def get_service_account(cls, """ # Construct URL. - url = '/projects/{}/serviceaccounts/{}'.format( + url = "/projects/{}/serviceaccounts/{}".format( project_id, service_account_id, ) # Return ServiceAccount object of GET request response. - return cls(dtrequests.DTRequest.get( - url=url, - **kwargs, - )) + return cls( + dtrequests.DTRequest.get( + url=url, + **kwargs, + ) + ) @classmethod - def list_service_accounts(cls, - project_id: str, - **kwargs: Any, - ) -> list[ServiceAccount]: + def list_service_accounts( + cls, + project_id: str, + **kwargs: Any, + ) -> list[ServiceAccount]: """ Gets a list of the current state of all Service Accounts in a project. @@ -135,23 +141,24 @@ def list_service_accounts(cls, """ # Construct URL. - url = '/projects/{}/serviceaccounts'.format(project_id) + url = "/projects/{}/serviceaccounts".format(project_id) # Return list of ServiceAccount objects of paginated GET response. service_accounts = dtrequests.DTRequest.paginated_get( url=url, - pagination_key='serviceAccounts', + pagination_key="serviceAccounts", **kwargs, ) return [cls(sa) for sa in service_accounts] @classmethod - def create_service_account(cls, - project_id: str, - display_name: str = '', - basic_auth_enabled: bool = False, - **kwargs: Any, - ) -> ServiceAccount: + def create_service_account( + cls, + project_id: str, + display_name: str = "", + basic_auth_enabled: bool = False, + **kwargs: Any, + ) -> ServiceAccount: """ Create a new Service Account in the specified project. @@ -185,29 +192,32 @@ def create_service_account(cls, """ # Construct URL. - url = '/projects/{}/serviceaccounts'.format(project_id) + url = "/projects/{}/serviceaccounts".format(project_id) # Construct body. body: dict = dict() - body['enableBasicAuth'] = basic_auth_enabled + body["enableBasicAuth"] = basic_auth_enabled if len(display_name) > 0: - body['displayName'] = display_name + body["displayName"] = display_name # Return ServiceAccount object of GET request response. 
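
A sketch of Service Account creation and listing, with a placeholder project ID.

```
import disruptive as dt

project_id = "<project-id>"

# Create a Service Account without basic auth (the default).
sa = dt.ServiceAccount.create_service_account(
    project_id=project_id,
    display_name="automation-account",
)

# List every Service Account in the project.
for account in dt.ServiceAccount.list_service_accounts(project_id):
    print(account.email, account.basic_auth_enabled)
```
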
- return cls(dtrequests.DTRequest.post( - url=url, - body=body, - **kwargs, - )) + return cls( + dtrequests.DTRequest.post( + url=url, + body=body, + **kwargs, + ) + ) @classmethod - def update_service_account(cls, - service_account_id: str, - project_id: str, - display_name: Optional[str] = None, - basic_auth_enabled: Optional[bool] = None, - **kwargs: Any, - ) -> ServiceAccount: + def update_service_account( + cls, + service_account_id: str, + project_id: str, + display_name: Optional[str] = None, + basic_auth_enabled: Optional[bool] = None, + **kwargs: Any, + ) -> ServiceAccount: """ Updates the attributes of a specified Service Account. @@ -250,7 +260,7 @@ def update_service_account(cls, """ # Construct URL. - url = '/projects/{}/serviceaccounts/{}'.format( + url = "/projects/{}/serviceaccounts/{}".format( project_id, service_account_id, ) @@ -258,23 +268,26 @@ def update_service_account(cls, # Construct body. body: dict = dict() if display_name is not None: - body['displayName'] = display_name + body["displayName"] = display_name if basic_auth_enabled is not None: - body['enableBasicAuth'] = basic_auth_enabled + body["enableBasicAuth"] = basic_auth_enabled # Return ServiceAccount object of GET request response. - return cls(dtrequests.DTRequest.patch( - url=url, - body=body, - **kwargs, - )) + return cls( + dtrequests.DTRequest.patch( + url=url, + body=body, + **kwargs, + ) + ) @classmethod - def delete_service_account(cls, - service_account_id: str, - project_id: str, - **kwargs: Any, - ) -> None: + def delete_service_account( + cls, + service_account_id: str, + project_id: str, + **kwargs: Any, + ) -> None: """ Deletes the specified Service Account. @@ -299,7 +312,7 @@ def delete_service_account(cls, """ # Construct URL. - url = '/projects/{}/serviceaccounts/{}'.format( + url = "/projects/{}/serviceaccounts/{}".format( project_id, service_account_id, ) @@ -311,11 +324,12 @@ def delete_service_account(cls, ) @staticmethod - def get_key(key_id: str, - service_account_id: str, - project_id: str, - **kwargs: Any, - ) -> Key: + def get_key( + key_id: str, + service_account_id: str, + project_id: str, + **kwargs: Any, + ) -> Key: """ Get the key of a Service Account. @@ -348,23 +362,26 @@ def get_key(key_id: str, """ # Construct URL. - url = '/projects/{}/serviceaccounts/{}/keys/{}'.format( + url = "/projects/{}/serviceaccounts/{}/keys/{}".format( project_id, service_account_id, key_id, ) # Return Key object of GET request response. - return Key(dtrequests.DTRequest.get( - url=url, - **kwargs, - )) + return Key( + dtrequests.DTRequest.get( + url=url, + **kwargs, + ) + ) @staticmethod - def list_keys(service_account_id: str, - project_id: str, - **kwargs: Any, - ) -> list[Key]: + def list_keys( + service_account_id: str, + project_id: str, + **kwargs: Any, + ) -> list[Key]: """ Get a list of all keys for a Service Account. @@ -394,7 +411,7 @@ def list_keys(service_account_id: str, """ # Construct URL. - url = '/projects/{}/serviceaccounts/{}/keys'.format( + url = "/projects/{}/serviceaccounts/{}/keys".format( project_id, service_account_id, ) @@ -402,16 +419,17 @@ def list_keys(service_account_id: str, # Return list of Key objects of paginated GET response. 
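
A sketch of updating a Service Account and inspecting its keys, with placeholder IDs.

```
import disruptive as dt

project_id = "<project-id>"
service_account_id = "<service-account-id>"

# Disable basic auth on an existing Service Account.
dt.ServiceAccount.update_service_account(
    service_account_id=service_account_id,
    project_id=project_id,
    basic_auth_enabled=False,
)

# List the keys currently registered on the account.
for key in dt.ServiceAccount.list_keys(
    service_account_id=service_account_id,
    project_id=project_id,
):
    print(key.key_id, key.create_time)
```
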
keys = dtrequests.DTRequest.paginated_get( url=url, - pagination_key='keys', + pagination_key="keys", **kwargs, ) return [Key(key) for key in keys] @staticmethod - def create_key(service_account_id: str, - project_id: str, - **kwargs: Any, - ) -> Key: + def create_key( + service_account_id: str, + project_id: str, + **kwargs: Any, + ) -> Key: """ Create a new key for the specified Service Account. @@ -441,7 +459,7 @@ def create_key(service_account_id: str, """ # Construct URL. - url = '/projects/{}/serviceaccounts/{}/keys'.format( + url = "/projects/{}/serviceaccounts/{}/keys".format( project_id, service_account_id, ) @@ -454,11 +472,12 @@ def create_key(service_account_id: str, return Key._with_secret(response) @staticmethod - def delete_key(key_id: str, - service_account_id: str, - project_id: str, - **kwargs: Any, - ) -> None: + def delete_key( + key_id: str, + service_account_id: str, + project_id: str, + **kwargs: Any, + ) -> None: """ Deletes a key in the specified Service Account. @@ -486,7 +505,7 @@ def delete_key(key_id: str, """ # Construct URL. - url = '/projects/{}/serviceaccounts/{}/keys/{}'.format( + url = "/projects/{}/serviceaccounts/{}/keys/{}".format( project_id, service_account_id, key_id, @@ -538,11 +557,12 @@ def __init__(self, key: dict) -> None: self.secret: Optional[str] = None # Unpack attributes from dictionary. - self.key_id: str = key['id'] - self.create_time: Optional[datetime] = \ - dttrans.to_datetime(key['createTime']) - if 'secret' in key: - self.secret = key['secret'] + self.key_id: str = key["id"] + self.create_time: Optional[datetime] = dttrans.to_datetime( + key["createTime"] + ) + if "secret" in key: + self.secret = key["secret"] @classmethod def _with_secret(cls, key: dict) -> Key: @@ -565,6 +585,6 @@ def _with_secret(cls, key: dict) -> Key: """ - flattened = key['key'] - flattened['secret'] = key['secret'] + flattened = key["key"] + flattened["secret"] = key["secret"] return cls(flattened) diff --git a/disruptive/resources/stream.py b/disruptive/resources/stream.py index 9f6e027..cfafd77 100644 --- a/disruptive/resources/stream.py +++ b/disruptive/resources/stream.py @@ -6,7 +6,7 @@ from disruptive.events.events import Event -class Stream(): +class Stream: """ Contains staticmethods for streaming events. Used for namespacing only and thus does not have a constructor @@ -14,13 +14,14 @@ class Stream(): """ @staticmethod - def event_stream(project_id: str, - device_ids: Optional[list[str]] = None, - label_filters: Optional[dict] = None, - device_types: Optional[list[str]] = None, - event_types: Optional[list[str]] = None, - **kwargs: Any, - ) -> Generator: + def event_stream( + project_id: str, + device_ids: Optional[list[str]] = None, + label_filters: Optional[dict] = None, + device_types: Optional[list[str]] = None, + event_types: Optional[list[str]] = None, + **kwargs: Any, + ) -> Generator: """ Stream events for one, multiple, or all device(s) in a project. @@ -88,21 +89,21 @@ def event_stream(project_id: str, # Construct parameters dictionary. 
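
Key creation is the only place the code handles a secret (`Key._with_secret` above), so the secret is assumed to be available only in the creation response; a sketch with placeholder IDs.

```
import disruptive as dt

project_id = "<project-id>"
service_account_id = "<service-account-id>"

# Create a new key and store the secret right away; it is assumed not to
# be retrievable from get_key() or list_keys() later.
key = dt.ServiceAccount.create_key(
    service_account_id=service_account_id,
    project_id=project_id,
)
print(key.key_id, key.secret)

# Keys can later be revoked by ID.
dt.ServiceAccount.delete_key(
    key_id=key.key_id,
    service_account_id=service_account_id,
    project_id=project_id,
)
```
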
params: dict = dict() if device_ids is not None: - params['device_ids'] = device_ids + params["device_ids"] = device_ids if device_types is not None: - params['device_types'] = device_types + params["device_types"] = device_types if label_filters is not None: - params['label_filters'] = [] + params["label_filters"] = [] for key in label_filters: if isinstance(label_filters[key], str): - string = key + '=' + label_filters[key] - params['label_filters'].append(string) + string = key + "=" + label_filters[key] + params["label_filters"].append(string) else: - params['label_filters'].append(key) + params["label_filters"].append(key) if event_types is not None: - params['event_types'] = event_types + params["event_types"] = event_types # Relay generator output. - url = '/projects/{}/devices:stream'.format(project_id) + url = "/projects/{}/devices:stream".format(project_id) for event in dtrequests.DTRequest.stream(url, params=params, **kwargs): yield Event(event) diff --git a/disruptive/transforms.py b/disruptive/transforms.py index 78f1d30..fa45f5c 100644 --- a/disruptive/transforms.py +++ b/disruptive/transforms.py @@ -9,9 +9,9 @@ def base64_encode(string: str) -> str: - string_bytes = string.encode('ascii') + string_bytes = string.encode("ascii") base64_bytes = base64.b64encode(string_bytes) - base64_string = base64_bytes.decode('ascii') + base64_string = base64_bytes.decode("ascii") return base64_string @@ -24,18 +24,20 @@ def to_iso8601(ts: Optional[str | datetime]) -> Optional[str]: return ts else: # Invalid iso8601 format, raise error. - msg = 'Timestamp format <{}> is invalid iso8601 format.\n' \ - 'Example: 2020-01-01T00:00:00Z'.format(ts) + msg = ( + "Timestamp format <{}> is invalid iso8601 format.\n" + "Example: 2020-01-01T00:00:00Z".format(ts) + ) raise dterrors.FormatError(msg) # If not string, datetime is also fine as it can be converted. elif isinstance(ts, datetime): if ts.tzinfo is None: - dt = ts.isoformat() + 'Z' + dt = ts.isoformat() + "Z" else: dt = ts.isoformat() - if dt.endswith('+00:00'): - dt = dt.replace('+00:00', 'Z') + if dt.endswith("+00:00"): + dt = dt.replace("+00:00", "Z") return dt @@ -45,8 +47,10 @@ def to_iso8601(ts: Optional[str | datetime]) -> Optional[str]: # If any other type, raise TypeError. else: - msg = 'Got timestamp of type <{}>, expected ' \ - 'iso8601 or .'.format(type(ts).__name__) + msg = ( + "Got timestamp of type <{}>, expected " + "iso8601 or .".format(type(ts).__name__) + ) raise dterrors._raise_builtin(TypeError, msg) @@ -61,11 +65,13 @@ def to_datetime(ts: Optional[str | datetime]) -> Optional[datetime]: # First, verify if string is valid iso8601 format. if validate_iso8601_format(ts): # Use built-in functions for converting to datetime. - return datetime.fromisoformat(ts.replace('Z', '+00:00')) + return datetime.fromisoformat(ts.replace("Z", "+00:00")) else: # Invalid iso8601 format, raise error. - msg = f'Timestamp format [{ts}] is invalid iso8601 format.\n' \ - 'Example: 2020-01-01T00:00:00Z' + msg = ( + f"Timestamp format [{ts}] is invalid iso8601 format.\n" + "Example: 2020-01-01T00:00:00Z" + ) raise dterrors.FormatError(msg) # If ts is None, return None. @@ -74,10 +80,10 @@ def to_datetime(ts: Optional[str | datetime]) -> Optional[datetime]: # If any other type, raise TypeError. 
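
The filter parameters built above translate directly into stream arguments; a sketch, noting that `event_stream()` is a blocking generator and that a string label value becomes a `key=value` filter while other values match on the key alone.

```
import disruptive as dt

project_id = "<project-id>"

# Stream temperature events from devices labeled room=kitchen.
for event in dt.Stream.event_stream(
    project_id=project_id,
    label_filters={"room": "kitchen"},
    event_types=[dt.events.TEMPERATURE],
):
    print(event.event_type, event.data)
```
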
else: - msg = 'Got timestamp of type <{}>, expected ' \ - 'iso8601 or .'.format( - type(ts).__name__ - ) + msg = ( + "Got timestamp of type <{}>, expected " + "iso8601 or .".format(type(ts).__name__) + ) raise dterrors._raise_builtin(TypeError, msg) @@ -88,9 +94,11 @@ def validate_iso8601_format(dt_str: str) -> bool: # that the datetime built-in method for checking iso8601 format # allows missing timezone infromation (i.e. Z or +-00:00 suffix). # This must be included in our API, and is why this regex exists. - iso8601_regex = r'^(-?(?:[1-9][0-9]*)?[0-9]{4})-(1[0-2]|0[1-9])-' \ - '(3[01]|0[1-9]|[12][0-9])T(2[0-3]|[01][0-9]):([0-5][0-9]):' \ - '([0-5][0-9])?(.[0-9]+)?(Z|[+-](?:2[0-3]|[01][0-9]):[0-5][0-9])$' + iso8601_regex = ( + r"^(-?(?:[1-9][0-9]*)?[0-9]{4})-(1[0-2]|0[1-9])-" + "(3[01]|0[1-9]|[12][0-9])T(2[0-3]|[01][0-9]):([0-5][0-9]):" + "([0-5][0-9])?(.[0-9]+)?(Z|[+-](?:2[0-3]|[01][0-9]):[0-5][0-9])$" + ) match_iso8601 = re.compile(iso8601_regex).match if match_iso8601(dt_str) is not None: @@ -115,8 +123,8 @@ def _celsius_to_fahrenheit(celsius: float) -> float: """ - return (celsius * (9/5)) + 32 + return (celsius * (9 / 5)) + 32 def camel_to_snake_case(x: str) -> str: - return re.sub(r'(? 15: - print('\t- pop') + print("\t- pop") event_buffer.pop(0) # Patiently wait for 5 seconds. diff --git a/pyproject.toml b/pyproject.toml index ee46d6c..0e9e86a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -2,8 +2,5 @@ requires = ["setuptools", "wheel"] build-backend = "setuptools.build_meta" -[tool.black] -force-exclude = '.*' - -[tool.ruff.format] -exclude = ["*"] \ No newline at end of file +[tool.ruff] +line-length = 79 \ No newline at end of file diff --git a/setup.cfg b/setup.cfg index 327a87e..b572406 100644 --- a/setup.cfg +++ b/setup.cfg @@ -52,7 +52,7 @@ dev = pytest-mock>=3.14.0 pytest-cov>=5.0.0 mypy>=1.11.2 - flake8>=7.1.1 + ruff>=0.9.6 extra = pandas >= 2.0.0, < 3.0.0 diff --git a/tests/api_responses.py b/tests/api_responses.py index aa69778..309e8f3 100644 --- a/tests/api_responses.py +++ b/tests/api_responses.py @@ -1,74 +1,52 @@ -auth_token_fresh = { - 'expires_in': 3600, - 'access_token': '' -} +auth_token_fresh = {"expires_in": 3600, "access_token": ""} -auth_token_expired = { - 'expires_in': 0, - 'access_token': '' -} +auth_token_expired = {"expires_in": 0, "access_token": ""} ccon = { "name": "projects/c0md8mm0c7bet3vic78g/devices/emuc909fk1qdqebrvv2jqv0", "type": "ccon", "productNumber": "100011", - "labels": { - "name": "ccon", - "virtual-sensor": "" - }, + "labels": {"name": "ccon", "virtual-sensor": ""}, "reported": { "connectionStatus": { "connection": "CELLULAR", - "available": [ - "ETHERNET", - "CELLULAR" - ], - "updateTime": "2021-03-13T16:05:21.692975Z" + "available": ["ETHERNET", "CELLULAR"], + "updateTime": "2021-03-13T16:05:21.692975Z", }, "connectionLatency": None, "ethernetStatus": { "macAddress": "", "ipAddress": "", "errors": [], - "updateTime": "2021-03-13T16:05:25.552018Z" + "updateTime": "2021-03-13T16:05:25.552018Z", }, "cellularStatus": { "signalStrength": 100, "errors": [], - "updateTime": "2021-03-13T16:05:23.230101Z" + "updateTime": "2021-03-13T16:05:23.230101Z", }, - "touch": { - "updateTime": "2021-03-13T16:05:27.591228Z" - } - } + "touch": {"updateTime": "2021-03-13T16:05:27.591228Z"}, + }, } humidity_sensor = { "name": "projects/c0md38m0c7bet4vico8g/devices/emuc109fhppdqebrvv2jqug", "type": "humidity", "productNumber": "101895", - "labels": { - "name": "humidity", - "new-label": "99", - "virtual-sensor": "" - }, + "labels": {"name": "humidity", 
"new-label": "99", "virtual-sensor": ""}, "reported": { "networkStatus": { "signalStrength": 99, "rssi": 0, "updateTime": "2021-03-13T16:05:35.392072Z", "cloudConnectors": [ - { - "id": "emulated-ccon", - "signalStrength": 99, - "rssi": 0 - } + {"id": "emulated-ccon", "signalStrength": 99, "rssi": 0} ], - "transmissionMode": "LOW_POWER_STANDARD_MODE" + "transmissionMode": "LOW_POWER_STANDARD_MODE", }, "batteryStatus": { "percentage": 100, - "updateTime": "2021-03-13T16:05:31.745319Z" + "updateTime": "2021-03-13T16:05:31.745319Z", }, "humidity": { "temperature": 0, @@ -77,52 +55,41 @@ { "temperature": 0, "relativeHumidity": 0, - "sampleTime": "2021-03-13T16:05:30.185800Z" + "sampleTime": "2021-03-13T16:05:30.185800Z", } ], "isBackfilled": False, - "updateTime": "2021-03-13T16:05:30.185800Z" + "updateTime": "2021-03-13T16:05:30.185800Z", }, - "touch": { - "updateTime": "2021-03-13T16:05:35.380533Z" - } - } + "touch": {"updateTime": "2021-03-13T16:05:35.380533Z"}, + }, } proximity_sensor = { "name": "projects/c0md3mm0c7pet3vico8g/devices/emuc0pc36pqdqebrvv29r8g", "type": "proximity", "productNumber": "102064", - "labels": { - "name": "proximity", - "virtual-sensor": "" - }, + "labels": {"name": "proximity", "virtual-sensor": ""}, "reported": { "networkStatus": { "signalStrength": 99, "rssi": 0, "updateTime": "2021-03-13T16:05:45.289219Z", "cloudConnectors": [ - { - "id": "emulated-ccon", - "signalStrength": 99, - "rssi": 0 - } + {"id": "emulated-ccon", "signalStrength": 99, "rssi": 0} ], - "transmissionMode": "LOW_POWER_STANDARD_MODE" + "transmissionMode": "LOW_POWER_STANDARD_MODE", }, "batteryStatus": { "percentage": 100, - "updateTime": "2021-03-13T16:05:41.082485Z" + "updateTime": "2021-03-13T16:05:41.082485Z", }, "objectPresent": { "state": "PRESENT", - "updateTime": "2021-03-13T16:05:39.474908Z" + "updateTime": "2021-03-13T16:05:39.474908Z", }, - "touch": { - "updateTime": "2021-03-13T16:05:45.281488Z" - } - } + "touch": {"updateTime": "2021-03-13T16:05:45.281488Z"}, + }, } temperature_sensor = { @@ -139,174 +106,128 @@ "rssi": 0, "updateTime": "2021-03-13T16:05:53.021835Z", "cloudConnectors": [ - { - "id": "emulated-ccon", - "signalStrength": 99, - "rssi": 0 - } + {"id": "emulated-ccon", "signalStrength": 99, "rssi": 0} ], - "transmissionMode": "LOW_POWER_STANDARD_MODE" + "transmissionMode": "LOW_POWER_STANDARD_MODE", }, "batteryStatus": { "percentage": 100, - "updateTime": "2021-03-13T16:05:49.380240Z" + "updateTime": "2021-03-13T16:05:49.380240Z", }, "temperature": { "value": -27, "isBackfilled": False, "samples": [ - { - "value": -27, - "sampleTime": "2021-03-13T16:05:47.722334Z" - } + {"value": -27, "sampleTime": "2021-03-13T16:05:47.722334Z"} ], - "updateTime": "2021-03-13T16:05:47.722334Z" + "updateTime": "2021-03-13T16:05:47.722334Z", }, - "touch": { - "updateTime": "2021-03-13T16:05:53.015325Z" - } - } + "touch": {"updateTime": "2021-03-13T16:05:53.015325Z"}, + }, } touch_sensor = { "name": "projects/c0md3mm0c7pet3vico8g/devices/emucpuc989qdqebrvv29so0", "type": "touch", "productNumber": "100110", - "labels": { - "name": "touch", - "new-label": "99", - "virtual-sensor": "" - }, + "labels": {"name": "touch", "new-label": "99", "virtual-sensor": ""}, "reported": { "networkStatus": { "signalStrength": 99, "rssi": -50, "updateTime": "2021-03-13T16:05:58.421952Z", "cloudConnectors": [ - { - "id": "emulated-ccon", - "signalStrength": 99, - "rssi": -50 - } + {"id": "emulated-ccon", "signalStrength": 99, "rssi": -50} ], - "transmissionMode": "LOW_POWER_STANDARD_MODE" + 
"transmissionMode": "LOW_POWER_STANDARD_MODE", }, "batteryStatus": { "percentage": 100, - "updateTime": "2021-03-13T16:05:56.684645Z" + "updateTime": "2021-03-13T16:05:56.684645Z", }, - "touch": { - "updateTime": "2021-03-13T16:05:55.084433Z" - } - } + "touch": {"updateTime": "2021-03-13T16:05:55.084433Z"}, + }, } water_present_sensor = { "name": "projects/c0md3mmpc7bet3vico8g/devices/emucpppobpqdqebrvv1iqog", "type": "waterDetector", "productNumber": "101714", - "labels": { - "name": "water", - "virtual-sensor": "" - }, + "labels": {"name": "water", "virtual-sensor": ""}, "reported": { "networkStatus": { "signalStrength": 99, "rssi": 0, "updateTime": "2021-03-13T16:06:05.532940Z", "cloudConnectors": [ - { - "id": "emulated-ccon", - "signalStrength": 99, - "rssi": 0 - } + {"id": "emulated-ccon", "signalStrength": 99, "rssi": 0} ], - "transmissionMode": "LOW_POWER_STANDARD_MODE" + "transmissionMode": "LOW_POWER_STANDARD_MODE", }, "batteryStatus": { "percentage": 100, - "updateTime": "2021-03-13T16:06:03.571156Z" + "updateTime": "2021-03-13T16:06:03.571156Z", }, "waterPresent": { "state": "NOT_PRESENT", - "updateTime": "2021-03-13T16:06:00.157924Z" + "updateTime": "2021-03-13T16:06:00.157924Z", }, - "touch": { - "updateTime": "2021-03-13T16:06:05.526762Z" - } - } + "touch": {"updateTime": "2021-03-13T16:06:05.526762Z"}, + }, } proximity_counter_sensor = { "name": "projects/c0md3mmpc7bet3vico8g/devices/emucp6e7qvlq0bgk44sg46g", "type": "proximityCounter", "productNumber": "101730", - "labels": { - "name": "proximity counter", - "virtual-sensor": "" - }, + "labels": {"name": "proximity counter", "virtual-sensor": ""}, "reported": { "networkStatus": { "signalStrength": 99, "rssi": 0, "updateTime": "2021-03-13T16:10:32.198962Z", "cloudConnectors": [ - { - "id": "emulated-ccon", - "signalStrength": 99, - "rssi": 0 - } + {"id": "emulated-ccon", "signalStrength": 99, "rssi": 0} ], - "transmissionMode": "LOW_POWER_STANDARD_MODE" + "transmissionMode": "LOW_POWER_STANDARD_MODE", }, "batteryStatus": { "percentage": 100, - "updateTime": "2021-03-13T16:10:27.026545Z" + "updateTime": "2021-03-13T16:10:27.026545Z", }, "objectPresentCount": { "total": 55, - "updateTime": "2021-03-13T16:10:25.184478Z" + "updateTime": "2021-03-13T16:10:25.184478Z", }, - "touch": { - "updateTime": "2021-03-13T16:10:32.184589Z" - } - } + "touch": {"updateTime": "2021-03-13T16:10:32.184589Z"}, + }, } touch_count_sensor = { "name": "projects/c0md3mmpc7bet3vico8g/devices/emucp6e8dnlq0bgk44sg4c0", "type": "touchCounter", "productNumber": "101675", - "labels": { - "name": "touch counter", - "virtual-sensor": "" - }, + "labels": {"name": "touch counter", "virtual-sensor": ""}, "reported": { "networkStatus": { "signalStrength": 99, "rssi": 0, "updateTime": "2021-03-13T16:11:43.908965Z", "cloudConnectors": [ - { - "id": "emulated-ccon", - "signalStrength": 99, - "rssi": 0 - } + {"id": "emulated-ccon", "signalStrength": 99, "rssi": 0} ], - "transmissionMode": "LOW_POWER_STANDARD_MODE" + "transmissionMode": "LOW_POWER_STANDARD_MODE", }, "batteryStatus": { "percentage": 100, - "updateTime": "2021-03-13T16:11:40.092811Z" + "updateTime": "2021-03-13T16:11:40.092811Z", }, "touchCount": { "total": 33, - "updateTime": "2021-03-13T16:11:37.365770Z" + "updateTime": "2021-03-13T16:11:37.365770Z", }, - "touch": { - "updateTime": "2021-03-13T16:11:43.902157Z" - } - } + "touch": {"updateTime": "2021-03-13T16:11:43.902157Z"}, + }, } null_reported_sensor = { @@ -314,14 +235,14 @@ "type": "temperature", "labels": { "name": "Emulated temperature: 
emuc16e9nvlq0bgk44sg4o0", - "virtual-sensor": "" + "virtual-sensor": "", }, "reported": { "networkStatus": None, # The REST API will return "null" for "batteryStatus": None, # these values if they are yet to be set. - "temperature": None, # This is converted to None in python, which - "touch": None # is why we set that here. - } + "temperature": None, # This is converted to None in python, which + "touch": None, # is why we set that here. + }, } unknown_reported_sensor = { @@ -330,15 +251,15 @@ "productNumber": "", "labels": { "name": "Emulated temperature: emuc16e9nvlq0bgk44sg4o0", - "virtual-sensor": "" + "virtual-sensor": "", }, "reported": { "networkStatus": None, # The REST API will return "null" for "batteryStatus": None, # these values if they are yet to be set. - "temperature": None, # This is converted to None in python, which - "touch": None, # is why we set that here. - "does_not_exist": {'key1': 'value1', 'key2': 'value2'}, - } + "temperature": None, # This is converted to None in python, which + "touch": None, # is why we set that here. + "does_not_exist": {"key1": "value1", "key2": "value2"}, + }, } all_devices_list = [ @@ -354,76 +275,64 @@ ] paginated_device_response = { - 'nextPageToken': '', - 'devices': all_devices_list, + "nextPageToken": "", + "devices": all_devices_list, } simple_data_connector = { "name": "projects/c0md3mm0c7pet3vico8g/" - + "dataconnectors/c16eegpdidie7lltpefg", + + "dataconnectors/c16eegpdidie7lltpefg", "displayName": "my-new-dcon", "type": "HTTP_PUSH", "status": "ACTIVE", "events": [], - "labels": [ - "name" - ], + "labels": ["name"], "httpConfig": { "url": "https://584087e0a1fa.eu.ngrok.io/api/endpoint", "signatureSecret": "", - "headers": {} - } + "headers": {}, + }, } unknown_data_connector = { "name": "projects/c0md3mm0c7pet3vico8g/" - + "dataconnectors/c16eegpdidie7lltpefg", + + "dataconnectors/c16eegpdidie7lltpefg", "displayName": "my-new-dcon", "type": "unknown", "status": "ACTIVE", "events": [], - "labels": [ - "name" - ], + "labels": ["name"], "httpConfig": { "url": "https://584087e0a1fa.eu.ngrok.io/api/endpoint", "signatureSecret": "", - "headers": {} - } + "headers": {}, + }, } configured_data_connector = { "name": "projects/c0md3pm0p7bet3vico8g/" - + "dataconnectors/c16pegipidie7lltrefg", + + "dataconnectors/c16pegipidie7lltrefg", "displayName": "my-new-dcon", "type": "HTTP_PUSH", "status": "ACTIVE", - "events": [ - "touch", - "temperature", - "objectPresent" - ], - "labels": [ - "name", - "custom-label-01", - "custom-label-02" - ], + "events": ["touch", "temperature", "objectPresent"], + "labels": ["name", "custom-label-01", "custom-label-02"], "httpConfig": { "url": "https://584087e0a1fa.eu.ngrok.io/api/endpoint", "signatureSecret": "some-very-good-secret", "headers": { "another-header": "header-contents", - "some-header": "abc123" - } - } + "some-header": "abc123", + }, + }, } paginated_data_connectors_response = { - 'nextPageToken': '', - 'dataConnectors': [ + "nextPageToken": "", + "dataConnectors": [ simple_data_connector, configured_data_connector, - ] + ], } small_project = { @@ -433,7 +342,7 @@ "organizationDisplayName": "some-display-name", "sensorCount": 7, "cloudConnectorCount": 1, - "inventory": False + "inventory": False, } empty_project = { @@ -443,144 +352,134 @@ "organizationDisplayName": "some-display-name", "sensorCount": 0, "cloudConnectorCount": 0, - "inventory": True + "inventory": True, } -projects = { - 'nextPageToken': '', - 'projects': [ - small_project, - empty_project - ] -} +projects = {"nextPageToken": "", 
"projects": [small_project, empty_project]} project_permissions = { - 'nextPageToken': '', - 'permissions': [ - 'sensor.update', - 'serviceaccount.read', - 'dataconnector.read', - 'serviceaccount.key.read', - 'project.read', - 'emulator.create', - 'sensor.read', - 'serviceaccount.key.create', - ] + "nextPageToken": "", + "permissions": [ + "sensor.update", + "serviceaccount.read", + "dataconnector.read", + "serviceaccount.key.read", + "project.read", + "emulator.create", + "sensor.read", + "serviceaccount.key.create", + ], } organization = { "name": "organizations/c10hussoss90036gu54g", - "displayName": "some-display-name" + "displayName": "some-display-name", } organizations = { - 'nextPageToken': '', - 'organizations': [ + "nextPageToken": "", + "organizations": [ organization, organization, organization, - ] + ], } organization_permissions = { - 'nextPageToken': '', - 'permissions': [ - 'project.create', - 'membership.create', - 'membership.read', - 'membership.update', - 'organization.update', - 'organization.read', - 'membership.delete', - ] + "nextPageToken": "", + "permissions": [ + "project.create", + "membership.create", + "membership.read", + "membership.update", + "organization.update", + "organization.read", + "membership.delete", + ], } service_account1 = { "name": "projects/c14u9q095l47ccv1o3pg/" - + "serviceaccounts/c14uar7915fg90c8lfp0", + + "serviceaccounts/c14uar7915fg90c8lfp0", "email": "c15uar7915fg13c8lfp0@c15u9p094l47cdv1o3qg." - + "serviceaccount.d21s.com", + + "serviceaccount.d21s.com", "displayName": "service-account-1", "enableBasicAuth": False, "createTime": "2021-03-11T09:39:56.015971Z", - "updateTime": "2021-03-11T09:39:56.103249Z" + "updateTime": "2021-03-11T09:39:56.103249Z", } service_account2 = { "name": "projects/c14u88094l47cdv1o3pg/" - + "serviceaccounts/c17m9hm914gg00c8levg", + + "serviceaccounts/c17m9hm914gg00c8levg", "email": "c17m8hn915gg00c8levg@c24u9p094l47cdv1o2pg." 
- + "serviceaccount.d21s.com", + + "serviceaccount.d21s.com", "displayName": "service-account-2", "enableBasicAuth": True, "createTime": "2021-03-15T13:44:38.974097Z", - "updateTime": "2021-03-15T13:44:39.039375Z" + "updateTime": "2021-03-15T13:44:39.039375Z", } service_accounts = { - 'nextPageToken': '', - 'serviceAccounts': [ + "nextPageToken": "", + "serviceAccounts": [ service_account1, service_account2, - ] + ], } key_without_secret = { "name": "projects/c14u0p894l47cdd1o3pg/serviceaccounts/" - + "c18jpqmolv9076epsv1g/keys/c18rs36olv9021epsv2g", + + "c18jpqmolv9076epsv1g/keys/c18rs36olv9021epsv2g", "id": "c18rs36olv9021epsv2g", - "createTime": "2021-03-17T08:25:48.953067Z" + "createTime": "2021-03-17T08:25:48.953067Z", } key_with_secret = { "key": { "name": "projects/c14u9p095l47ccv1o3pg/serviceaccounts/" - + "c18ppqmoiv9008epsv1g/keys/c19tg0eoiv9008epsv50", + + "c18ppqmoiv9008epsv1g/keys/c19tg0eoiv9008epsv50", "id": "c19tg0eoiv9008epsv50", "createTime": "2021-03-17T10:20:49.231251Z", }, - "secret": "239dcd912b8041a58054f843a2a633a4" + "secret": "239dcd912b8041a58054f843a2a633a4", } keys = { - 'nextPageToken': '', - 'keys': [ + "nextPageToken": "", + "keys": [ key_without_secret, key_without_secret, - ] + ], } user_member = { "name": "organizations/c11humq0ss9o036gu53g/members/9201", "displayName": "my-org-member", - "roles": [ - "roles/organization.admin" - ], + "roles": ["roles/organization.admin"], "status": "ACCEPTED", "email": "my_user_account@disruptive-technologies.com", "accountType": "USER", - "createTime": "1970-01-01T00:00:00.000000Z" + "createTime": "1970-01-01T00:00:00.000000Z", } service_account_member = { "name": "organizations/c10humqoss90032gu54g/members/c17n9hn915gg00c8ievg", "displayName": "localdev", - "roles": [ - "roles/project.developer" - ], + "roles": ["roles/project.developer"], "status": "ACCEPTED", "email": "c17m9hn815gg00c8levu@c14u9p094l47cdv1o3pg." 
+ "serviceaccount.d21s.com", "accountType": "SERVICE_ACCOUNT", - "createTime": "2021-03-15T13:45:53.964040Z" + "createTime": "2021-03-15T13:45:53.964040Z", } members = { - 'nextPageToken': '', - 'members': [ + "nextPageToken": "", + "members": [ user_member, service_account_member, - ] + ], } project_user_role = { @@ -596,8 +495,8 @@ "dataconnector.read", "serviceaccount.read", "serviceaccount.key.read", - "emulator.read" - ] + "emulator.read", + ], } project_developer_role = { @@ -620,47 +519,43 @@ "emulator.read", "emulator.update", "emulator.create", - "emulator.delete" - ] + "emulator.delete", + ], } roles = { - 'nextPageToken': '', - 'roles': [ + "nextPageToken": "", + "roles": [ project_user_role, project_developer_role, - ] + ], } metrics = { - 'metrics': { - 'successCount': 9, - 'errorCount': 0, - 'latency99p': '0.411s', + "metrics": { + "successCount": 9, + "errorCount": 0, + "latency99p": "0.411s", } } created_temperature_emulator = { - 'name': 'projects/c14u9p094l47cdv1o3pg/devices/emuc1so7tgttn8sjobqvvug', - 'type': 'temperature', - 'productNumber': '102058', - 'labels': { - 'key': 'value', - 'name': 'new-device', - 'virtual-sensor': '', - } + "name": "projects/c14u9p094l47cdv1o3pg/devices/emuc1so7tgttn8sjobqvvug", + "type": "temperature", + "productNumber": "102058", + "labels": { + "key": "value", + "name": "new-device", + "virtual-sensor": "", + }, } touch_event = { "eventId": "01", "targetName": "/projets/project_id/devices/device_id ", "eventType": "touch", - "data": { - "touch": { - "updateTime": "2019-05-16T08:13:15.361624Z" - } - }, - "timestamp": "1970-01-01T00:00:00Z" + "data": {"touch": {"updateTime": "2019-05-16T08:13:15.361624Z"}}, + "timestamp": "1970-01-01T00:00:00Z", } temperature_event = { @@ -672,15 +567,12 @@ "value": 24.9, "isBackfilled": False, "samples": [ - { - "value": 24.9, - "sampleTime": "2019-05-16T08:15:18.318751Z" - } + {"value": 24.9, "sampleTime": "2019-05-16T08:15:18.318751Z"} ], - "updateTime": "2019-05-16T08:15:18.318751Z" + "updateTime": "2019-05-16T08:15:18.318751Z", } }, - "timestamp": "1970-01-01T00:00:00Z" + "timestamp": "1970-01-01T00:00:00Z", } object_present_event = { @@ -690,10 +582,10 @@ "data": { "objectPresent": { "state": "NOT_PRESENT", - "updateTime": "2019-05-16T08:37:10.711412Z" + "updateTime": "2019-05-16T08:37:10.711412Z", } }, - "timestamp": "1970-01-01T00:00:00Z" + "timestamp": "1970-01-01T00:00:00Z", } humidity_event = { @@ -708,14 +600,14 @@ { "temperature": 22.45, "relativeHumidity": 17, - "sampleTime": "2019-05-16T06:13:46.369000Z" + "sampleTime": "2019-05-16T06:13:46.369000Z", } ], "isBackfilled": False, - "updateTime": "2019-05-16T06:13:46.369000Z" + "updateTime": "2019-05-16T06:13:46.369000Z", } }, - "timestamp": "1970-01-01T00:00:00Z" + "timestamp": "1970-01-01T00:00:00Z", } object_present_count_event = { @@ -725,10 +617,10 @@ "data": { "objectPresentCount": { "total": 4176, - "updateTime": "2019-05-16T08:23:43.209000Z" + "updateTime": "2019-05-16T08:23:43.209000Z", } }, - "timestamp": "1970-01-01T00:00:00Z" + "timestamp": "1970-01-01T00:00:00Z", } touch_count_event = { @@ -738,10 +630,10 @@ "data": { "touchCount": { "total": 469, - "updateTime": "2019-05-16T08:25:21.604000Z" + "updateTime": "2019-05-16T08:25:21.604000Z", } }, - "timestamp": "1970-01-01T00:00:00Z" + "timestamp": "1970-01-01T00:00:00Z", } water_present_event = { @@ -751,10 +643,10 @@ "data": { "waterPresent": { "state": "PRESENT", - "updateTime": "2019-05-16T08:43:16.266000Z" + "updateTime": "2019-05-16T08:43:16.266000Z", } }, - "timestamp": 
"1970-01-01T00:00:00Z" + "timestamp": "1970-01-01T00:00:00Z", } network_status_event = { @@ -770,13 +662,13 @@ { "id": "bdkjbo2v0000uk377c4g", "signalStrength": 45, - "rssi": -83 + "rssi": -83, } ], - "transmissionMode": "LOW_POWER_STANDARD_MODE" + "transmissionMode": "LOW_POWER_STANDARD_MODE", } }, - "timestamp": "1970-01-01T00:00:00Z" + "timestamp": "1970-01-01T00:00:00Z", } battery_status_event = { @@ -786,10 +678,10 @@ "data": { "batteryStatus": { "percentage": 100, - "updateTime": "2019-05-16T08:21:21.076013Z" + "updateTime": "2019-05-16T08:21:21.076013Z", } }, - "timestamp": "1970-01-01T00:00:00Z" + "timestamp": "1970-01-01T00:00:00Z", } labels_changed_event = { @@ -797,18 +689,11 @@ "targetName": "/projets/project_id/devices/device_id ", "eventType": "labelsChanged", "data": { - "added": { - "label-key": "label-value" - }, - "modified": { - "label-key": "new-label-value" - }, - "removed": [ - "remove-key1", - "remove-key2" - ] + "added": {"label-key": "label-value"}, + "modified": {"label-key": "new-label-value"}, + "removed": ["remove-key1", "remove-key2"], }, - "timestamp": "1970-01-01T00:00:00Z" + "timestamp": "1970-01-01T00:00:00Z", } connection_status_event = { @@ -818,14 +703,11 @@ "data": { "connectionStatus": { "connection": "ETHERNET", - "available": [ - "CELLULAR", - "ETHERNET" - ], - "updateTime": "2019-05-16T08:21:21.076013Z" + "available": ["CELLULAR", "ETHERNET"], + "updateTime": "2019-05-16T08:21:21.076013Z", } }, - "timestamp": "1970-01-01T00:00:00Z" + "timestamp": "1970-01-01T00:00:00Z", } ethernet_status_event = { @@ -837,10 +719,10 @@ "macAddress": "f0:b5:b7:00:0a:08", "ipAddress": "10.0.0.1", "errors": [], - "updateTime": "2019-05-16T08:21:21.076013Z" + "updateTime": "2019-05-16T08:21:21.076013Z", } }, - "timestamp": "1970-01-01T00:00:00Z" + "timestamp": "1970-01-01T00:00:00Z", } cellular_status_event = { @@ -851,36 +733,31 @@ "cellularStatus": { "signalStrength": 80, "errors": [], - "updateTime": "2019-05-16T08:21:21.076013Z" + "updateTime": "2019-05-16T08:21:21.076013Z", } }, - "timestamp": "1970-01-01T00:00:00Z" + "timestamp": "1970-01-01T00:00:00Z", } co2_event = { "eventId": "u7pbuijjnlactnn1p510", "targetName": "projects/i75ivl3go7df88ctp0uu/devices/b6sfppl7rihg1dm4ud8g", "eventType": "co2", - "data": { - "co2": { - "ppm": 526, - "updateTime": "2022-01-27T15:50:34.471000Z" - } - }, - "timestamp": "2022-01-27T15:50:34.471000Z" + "data": {"co2": {"ppm": 526, "updateTime": "2022-01-27T15:50:34.471000Z"}}, + "timestamp": "2022-01-27T15:50:34.471000Z", } motion_event = { - 'eventId': '9j53ftiet8nog4fac7ug', - 'targetName': 'projects/ci429ep0vlodur6n23f0/devices/emucj4fj3epdjrin69t0v80', - 'eventType': 'motion', - 'data': { - 'motion': { - 'state': 'MOTION_DETECTED', - 'updateTime': '2023-08-02T11:03:18.595777Z', + "eventId": "9j53ftiet8nog4fac7ug", + "targetName": "projects/ci429ep0vlodur6n23f0/devices/emucj4fj3epdjrin69t0v80", + "eventType": "motion", + "data": { + "motion": { + "state": "MOTION_DETECTED", + "updateTime": "2023-08-02T11:03:18.595777Z", }, }, - 'timestamp': '2023-08-02T11:03:18.595777Z', + "timestamp": "2023-08-02T11:03:18.595777Z", } pressure_event = { @@ -890,15 +767,15 @@ "data": { "pressure": { "pascal": 99301, - "updateTime": "2022-01-27T15:50:34.471000Z" + "updateTime": "2022-01-27T15:50:34.471000Z", } }, - "timestamp": "2022-01-27T15:50:34.471000Z" + "timestamp": "2022-01-27T15:50:34.471000Z", } event_history_each_type = { - 'nextPageToken': '', - 'events': [ + "nextPageToken": "", + "events": [ touch_event, temperature_event, 
object_present_event, @@ -915,57 +792,63 @@ co2_event, motion_event, pressure_event, - ] + ], } -stream_ping = b'{"result":{"event":{"eventId":"c18tihhh9sn7fi2hur50",'\ - b'"targetName":"projects/c14u9p094l47ccv1o3p9","eventType":'\ +stream_ping = ( + b'{"result":{"event":{"eventId":"c18tihhh9sn7fi2hur50",' + b'"targetName":"projects/c14u9p094l47ccv1o3p9","eventType":' b'"ping","data":null,"timestamp":"2021-04-21T07:50:30.604786Z"}}}' - -stream_temperature_event = b'{"result":{"event":{"eventId":'\ - b'"d1vtobtd83ut9sd2bj9g","targetName":"projects/914u9p094l47cdv1o3pg'\ - b'/devices/emui17m69nlq0bgk44smcng","eventType":"temperature","data"'\ - b':{"temperature":{"value":5,"updateTime":"2021-04-21T08:'\ - b'15:43.512330Z","isBackfilled":"False",'\ - b'"samples":[{"value":5,"sampleTime":"2021-04-21T08:'\ +) + +stream_temperature_event = ( + b'{"result":{"event":{"eventId":' + b'"d1vtobtd83ut9sd2bj9g","targetName":"projects/914u9p094l47cdv1o3pg' + b'/devices/emui17m69nlq0bgk44smcng","eventType":"temperature","data"' + b':{"temperature":{"value":5,"updateTime":"2021-04-21T08:' + b'15:43.512330Z","isBackfilled":"False",' + b'"samples":[{"value":5,"sampleTime":"2021-04-21T08:' b'15:43.512330Z"}]}},"timestamp":"2021-04-21T08:15:43.512330Z"}}}' - -stream_networkstatus_event = b'{"result":{"event":{"eventId":"c1vtubtd83it'\ - b'9ud2bja0","targetName":"projects/c14u9p094l47cdb1oipg/devices/'\ - b'emuc17m69nlq0bgk4osmcug","eventType":"networkStatus","data"'\ - b':{"networkStatus":{"signalStrength":99,"rssi":0,"updateTime"'\ - b':"2021-04-21T08:15:43.520167Z","cloudConnectors":[{"id":'\ - b'"emulated-ccon","signalStrength":99,"rssi":0}],'\ - b'"transmissionMode":"LOW_POWER_STANDARD_MODE"}},"timestamp":'\ +) + +stream_networkstatus_event = ( + b'{"result":{"event":{"eventId":"c1vtubtd83it' + b'9ud2bja0","targetName":"projects/c14u9p094l47cdb1oipg/devices/' + b'emuc17m69nlq0bgk4osmcug","eventType":"networkStatus","data"' + b':{"networkStatus":{"signalStrength":99,"rssi":0,"updateTime"' + b':"2021-04-21T08:15:43.520167Z","cloudConnectors":[{"id":' + b'"emulated-ccon","signalStrength":99,"rssi":0}],' + b'"transmissionMode":"LOW_POWER_STANDARD_MODE"}},"timestamp":' b'"2021-04-21T08:15:43.520167Z"}}}' +) transfer_device_no_errors = { - 'transferredDevices': [ - 'projects/source_project/devices/device_id1', - 'projects/source_project/devices/device_id2', + "transferredDevices": [ + "projects/source_project/devices/device_id1", + "projects/source_project/devices/device_id2", ], - 'transferErrors': [], + "transferErrors": [], } transfer_device_errors = { - 'transferredDevices': [ - 'projects/source_project/devices/device_id1', - 'projects/source_project/devices/device_id2', + "transferredDevices": [ + "projects/source_project/devices/device_id1", + "projects/source_project/devices/device_id2", ], - 'transferErrors': [ + "transferErrors": [ { - 'device': 'projects/source_project/devices/123', - 'status': { - 'code': 'NOT_FOUND', - 'message': 'resource not found', - } + "device": "projects/source_project/devices/123", + "status": { + "code": "NOT_FOUND", + "message": "resource not found", + }, }, { - 'device': 'projects/source_project/devices/abc', - 'status': { - 'code': 'NOT_FOUND', - 'message': 'resource not found', - } + "device": "projects/source_project/devices/abc", + "status": { + "code": "NOT_FOUND", + "message": "resource not found", + }, }, ], } @@ -976,150 +859,150 @@ "device": "/projects//devices/", "status": { "code": "INVALID_ARGUMENT", - "message": "Max labels reached for device." 
- } + "message": "Max labels reached for device.", + }, }, { "device": "/projects//devices/", "status": { "code": "INTERNAL_ERROR", - "message": "Operation timed out. Retry again in a few seconds." - } - } + "message": "Operation timed out. Retry again in a few seconds.", + }, + }, ] } claim_error_device_already_claimed = { - 'deviceId': 'b', - 'code': 'ALREADY_CLAIMED', - 'message': 'The device was previously claimed', + "deviceId": "b", + "code": "ALREADY_CLAIMED", + "message": "The device was previously claimed", } claim_error_device_not_found = { - 'deviceId': 'c', - 'code': 'NOT_FOUND', - 'message': 'The device was not found', + "deviceId": "c", + "code": "NOT_FOUND", + "message": "The device was not found", } claim_error_kit_not_found = { - 'kitId': 'd', - 'code': 'NOT_FOUND', - 'message': 'The kit was not found', + "kitId": "d", + "code": "NOT_FOUND", + "message": "The kit was not found", } claimed_devices = { - 'claimedDevices': [ + "claimedDevices": [ { - 'deviceId': 'a', - 'deviceType': 'temperature', - 'productNumber': 'a', - 'isClaimed': True, + "deviceId": "a", + "deviceType": "temperature", + "productNumber": "a", + "isClaimed": True, }, { - 'deviceId': 'b', - 'deviceType': 'temperature', - 'productNumber': 'b', - 'isClaimed': True, + "deviceId": "b", + "deviceType": "temperature", + "productNumber": "b", + "isClaimed": True, }, ], - 'claimErrors': { - 'devices': [], - 'kits': [], - } + "claimErrors": { + "devices": [], + "kits": [], + }, } claimed_device_already_claimed = { - 'claimedDevices': [ + "claimedDevices": [ { - 'deviceId': 'a', - 'deviceType': 'temperature', - 'productNumber': 'a', - 'isClaimed': True, + "deviceId": "a", + "deviceType": "temperature", + "productNumber": "a", + "isClaimed": True, }, ], - 'claimErrors': { - 'devices': [ + "claimErrors": { + "devices": [ claim_error_device_already_claimed, ], - 'kits': [], - } + "kits": [], + }, } claimed_device_not_found = { - 'claimedDevices': [ + "claimedDevices": [ { - 'deviceId': 'a', - 'deviceType': 'temperature', - 'productNumber': 'a', - 'isClaimed': True, + "deviceId": "a", + "deviceType": "temperature", + "productNumber": "a", + "isClaimed": True, }, ], - 'claimErrors': { - 'devices': [ + "claimErrors": { + "devices": [ claim_error_device_not_found, ], - 'kits': [], - } + "kits": [], + }, } claimed_kit_not_found = { - 'claimedDevices': [ + "claimedDevices": [ { - 'deviceId': 'a', - 'deviceType': 'temperature', - 'productNumber': 'a', - 'isClaimed': True, + "deviceId": "a", + "deviceType": "temperature", + "productNumber": "a", + "isClaimed": True, }, ], - 'claimErrors': { - 'devices': [], - 'kits': [claim_error_kit_not_found], - } + "claimErrors": { + "devices": [], + "kits": [claim_error_kit_not_found], + }, } claim_info_kit = { - 'type': 'KIT', - 'kit': { - 'kitId': 'fff000', - 'displayName': 'Starter Kit EU, 5 sensors', - 'sensors': { - 'totalCount': 5, - 'claimedCount': 0, - }, - 'cloudConnectors': { - 'totalCount': 1, - 'claimedCount': 0, - }, - 'devices': [ + "type": "KIT", + "kit": { + "kitId": "fff000", + "displayName": "Starter Kit EU, 5 sensors", + "sensors": { + "totalCount": 5, + "claimedCount": 0, + }, + "cloudConnectors": { + "totalCount": 1, + "claimedCount": 0, + }, + "devices": [ { - 'deviceId': 'a', - 'deviceType': 'touch', - 'productNumber': '', - 'isClaimed': True, + "deviceId": "a", + "deviceType": "touch", + "productNumber": "", + "isClaimed": True, }, { - 'deviceId': 'b', - 'deviceType': 'proximity', - 'productNumber': '', - 'isClaimed': True, - }, + "deviceId": "b", + "deviceType": 
"proximity", + "productNumber": "", + "isClaimed": True, + }, { - 'deviceId': 'c', - 'deviceType': 'temperature', - 'productNumber': '', - 'isClaimed': True, + "deviceId": "c", + "deviceType": "temperature", + "productNumber": "", + "isClaimed": True, }, { - 'deviceId': 'd', - 'deviceType': 'proximity', - 'productNumber': '', - 'isClaimed': True, + "deviceId": "d", + "deviceType": "proximity", + "productNumber": "", + "isClaimed": True, }, { - 'deviceId': 'e', - 'deviceType': 'temperature', - 'productNumber': '', - 'isClaimed': True, + "deviceId": "e", + "deviceType": "temperature", + "productNumber": "", + "isClaimed": True, }, ], }, diff --git a/tests/framework.py b/tests/framework.py index c73866e..f60a7ac 100644 --- a/tests/framework.py +++ b/tests/framework.py @@ -4,7 +4,7 @@ from disruptive.authentication import Unauthenticated -class RequestsReponseMock(): +class RequestsReponseMock: """ A simple class used to imitate an requests.Response object. @@ -31,8 +31,7 @@ def iter_lines(self, decode_unicode=False): raise KeyboardInterrupt -class RequestMock(): - +class RequestMock: def __init__(self, mocker): self._mocker = mocker @@ -49,18 +48,18 @@ def __init__(self, mocker): self.iter_data = [] self.request_patcher = self._mocker.patch( - 'requests.request', + "requests.request", side_effect=self._patched_requests_request, ) self.auth_expiration_patcher = self._mocker.patch.object( Unauthenticated, - '_has_expired', + "_has_expired", return_value=False, ) self.sleep_patcher = self._mocker.patch( - 'time.sleep', + "time.sleep", ) def _patched_requests_request(self, **kwargs): @@ -81,11 +80,11 @@ def assert_requested( url, params={}, headers={ - 'Authorization': '', - 'User-Agent': 'DisruptivePythonAPI/{} Python/{}'.format( + "Authorization": "", + "User-Agent": "DisruptivePythonAPI/{} Python/{}".format( dt.__version__, - f'{sys.version_info.major}.{sys.version_info.minor}', - ) + f"{sys.version_info.major}.{sys.version_info.minor}", + ), }, body=None, data=None, diff --git a/tests/test_auth.py b/tests/test_auth.py index a9393c4..cfcf9bf 100644 --- a/tests/test_auth.py +++ b/tests/test_auth.py @@ -6,15 +6,14 @@ from disruptive.authentication import ServiceAccountAuth -class TestAuth(): - +class TestAuth: def test_repr(self, request_mock): # Update the response json with a mock token response. res = dtapiresponses.auth_token_fresh request_mock.json = res # Fetch a role. - x = disruptive.Auth.service_account('key_id', 'secret', 'email') + x = disruptive.Auth.service_account("key_id", "secret", "email") # Evaluate __repr__ function and compare copy. eval(repr(x)) @@ -25,7 +24,7 @@ def test_service_account_auth(self, request_mock): request_mock.json = res # Call the two classmethod constructors. - auth = disruptive.Auth.service_account('key_id', 'secret', 'email') + auth = disruptive.Auth.service_account("key_id", "secret", "email") # Assert token post request not sent at construction. request_mock.assert_request_count(0) @@ -39,7 +38,7 @@ def test_token_refresh(self, request_mock): request_mock.json = res # Create an authentication object. - auth = disruptive.Auth.service_account('key_id', 'secret', 'email') + auth = disruptive.Auth.service_account("key_id", "secret", "email") # Verify expired token. assert auth._has_expired() @@ -57,17 +56,17 @@ def test_token_refresh(self, request_mock): def test_raise_none_credential(self): # Verify InvalidTypeError raised at None input credential. 
with pytest.raises(TypeError): - disruptive.Auth.service_account(None, 'secret', 'email') + disruptive.Auth.service_account(None, "secret", "email") with pytest.raises(TypeError): - disruptive.Auth.service_account('key_id', None, 'email') + disruptive.Auth.service_account("key_id", None, "email") with pytest.raises(TypeError): - disruptive.Auth.service_account('key_id', 'secret', None) + disruptive.Auth.service_account("key_id", "secret", None) def test_raise_empty_string_credential(self): # Verify ConfigurationError raised at missing input credential. with pytest.raises(dterrors.ConfigurationError): - disruptive.Auth.service_account('', 'secret', 'email') + disruptive.Auth.service_account("", "secret", "email") with pytest.raises(dterrors.ConfigurationError): - disruptive.Auth.service_account('key_id', '', 'email') + disruptive.Auth.service_account("key_id", "", "email") with pytest.raises(dterrors.ConfigurationError): - disruptive.Auth.service_account('key_id', 'secret', '') + disruptive.Auth.service_account("key_id", "secret", "") diff --git a/tests/test_claim.py b/tests/test_claim.py index 2459f70..38fdd77 100644 --- a/tests/test_claim.py +++ b/tests/test_claim.py @@ -8,8 +8,7 @@ import tests.api_responses as dtapiresponses -class TestClaim(): - +class TestClaim: def test_claim(self, request_mock): @dataclass class TestCase: @@ -24,53 +23,53 @@ class TestCase: tests = [ TestCase( - name='claim devices', - give_project_id='p1', - give_device_ids=['a', 'b'], + name="claim devices", + give_project_id="p1", + give_device_ids=["a", "b"], give_kit_ids=[], give_dry_run=False, give_res=dtapiresponses.claimed_devices, - want_device_ids=['a', 'b'], + want_device_ids=["a", "b"], want_error=None, ), TestCase( - name='claim kits', - give_project_id='p1', + name="claim kits", + give_project_id="p1", give_device_ids=[], - give_kit_ids=['k1'], + give_kit_ids=["k1"], give_dry_run=False, give_res=dtapiresponses.claimed_devices, - want_device_ids=['a', 'b'], + want_device_ids=["a", "b"], want_error=None, ), TestCase( - name='device already claimed', - give_project_id='p1', - give_device_ids=['a'], + name="device already claimed", + give_project_id="p1", + give_device_ids=["a"], give_kit_ids=[], give_dry_run=False, give_res=dtapiresponses.claimed_device_already_claimed, - want_device_ids=['a'], + want_device_ids=["a"], want_error=dterrors.ClaimErrorDeviceAlreadyClaimed, ), TestCase( - name='device not found', - give_project_id='p1', - give_device_ids=['a'], + name="device not found", + give_project_id="p1", + give_device_ids=["a"], give_kit_ids=[], give_dry_run=False, give_res=dtapiresponses.claimed_device_not_found, - want_device_ids=['a'], + want_device_ids=["a"], want_error=dterrors.ClaimErrorDeviceNotFound, ), TestCase( - name='kit not found', - give_project_id='p1', + name="kit not found", + give_project_id="p1", give_device_ids=[], - give_kit_ids=['a'], + give_kit_ids=["a"], give_dry_run=False, give_res=dtapiresponses.claimed_kit_not_found, - want_device_ids=['a'], + want_device_ids=["a"], want_error=dterrors.ClaimErrorKitNotFound, ), ] @@ -85,8 +84,9 @@ class TestCase: ) for i in range(len(devices)): - assert devices[i].device_id == test.want_device_ids[i], \ + assert devices[i].device_id == test.want_device_ids[i], ( test.name + ) for error in errors: if test.want_error is not None: @@ -106,13 +106,13 @@ class TestCase: tests = [ TestCase( - name='test 1', - give_identifier='id1', + name="test 1", + give_identifier="id1", give_res=dtapiresponses.claim_info_kit, want_err=None, ), TestCase( - 
name='identifier type conflict', + name="identifier type conflict", give_identifier=99, give_res={}, want_err=TypeError, @@ -138,38 +138,38 @@ class TestCase: tests = [ TestCase( - name='unknown code ClaimError', + name="unknown code ClaimError", give_res={ - 'devices': [{'code': 'UNKNOWN_CODE'}], - 'kits': [{'code': 'UNKNOWN_CODE'}], + "devices": [{"code": "UNKNOWN_CODE"}], + "kits": [{"code": "UNKNOWN_CODE"}], }, want_err=dterrors.ClaimError, ), TestCase( - name='device already claimed', + name="device already claimed", give_res={ - 'devices': [ + "devices": [ dtapiresponses.claim_error_device_already_claimed, ], - 'kits': [], + "kits": [], }, want_err=dterrors.ClaimErrorDeviceAlreadyClaimed, ), TestCase( - name='device not found', + name="device not found", give_res={ - 'devices': [ + "devices": [ dtapiresponses.claim_error_device_not_found, ], - 'kits': [], + "kits": [], }, want_err=dterrors.ClaimErrorDeviceNotFound, ), TestCase( - name='kit not found', + name="kit not found", give_res={ - 'devices': [], - 'kits': [ + "devices": [], + "kits": [ dtapiresponses.claim_error_kit_not_found, ], }, diff --git a/tests/test_data_connector.py b/tests/test_data_connector.py index 690951e..34d2a94 100644 --- a/tests/test_data_connector.py +++ b/tests/test_data_connector.py @@ -4,8 +4,7 @@ import tests.api_responses as dtapiresponses -class TestDataconnector(): - +class TestDataconnector: def test_repr(self, request_mock): # Update the response data with Data Connector data. res = dtapiresponses.configured_data_connector @@ -13,8 +12,8 @@ def test_repr(self, request_mock): # Fetch a Data Connector.. x = disruptive.DataConnector.get_data_connector( - data_connector_id='data_connector_id', - project_id='project_id', + data_connector_id="data_connector_id", + project_id="project_id", ) # Evaluate __repr__ function and compare copy. @@ -28,22 +27,22 @@ def test_attributes(self, request_mock): # Call the appropriate endpoint. d = disruptive.DataConnector.get_data_connector( - data_connector_id='data_connector_id', - project_id='project_id', + data_connector_id="data_connector_id", + project_id="project_id", ) # Assert attributes unpacked correctly. - assert d.data_connector_id == r['name'].split('/')[-1] - assert d.project_id == r['name'].split('/')[1] - assert d.status == r['status'] - assert d.display_name == r['displayName'] - assert d.event_types == r['events'] - assert d.labels == r['labels'] - assert d.data_connector_type == r['type'] + assert d.data_connector_id == r["name"].split("/")[-1] + assert d.project_id == r["name"].split("/")[1] + assert d.status == r["status"] + assert d.display_name == r["displayName"] + assert d.event_types == r["events"] + assert d.labels == r["labels"] + assert d.data_connector_type == r["type"] assert isinstance(d.config, disruptive.DataConnector.HttpPushConfig) - assert d.config.url == r['httpConfig']['url'] - assert d.config.signature_secret == r['httpConfig']['signatureSecret'] - assert d.config.headers == r['httpConfig']['headers'] + assert d.config.url == r["httpConfig"]["url"] + assert d.config.signature_secret == r["httpConfig"]["signatureSecret"] + assert d.config.headers == r["httpConfig"]["headers"] def test_unknown_config_type(self, request_mock): # Update the response json with a mock Data Connector of unknown type. @@ -52,12 +51,12 @@ def test_unknown_config_type(self, request_mock): # Call an endpoint to construct a Data Connector object. 
d = disruptive.DataConnector.get_data_connector( - data_connector_id='data_connector_id', - project_id='project_id', + data_connector_id="data_connector_id", + project_id="project_id", ) # Assert config attribute is None. - assert d.data_connector_type == 'unknown' + assert d.data_connector_type == "unknown" assert d.config is None def test_get_data_connector(self, request_mock): @@ -66,15 +65,15 @@ def test_get_data_connector(self, request_mock): # Call the appropriate endpoint. d = disruptive.DataConnector.get_data_connector( - data_connector_id='data_connector_id', - project_id='project_id', + data_connector_id="data_connector_id", + project_id="project_id", ) # Verify expected outgoing parameters in request. url = disruptive.base_url - url += '/projects/project_id/dataconnectors/data_connector_id' + url += "/projects/project_id/dataconnectors/data_connector_id" request_mock.assert_requested( - method='GET', + method="GET", url=url, ) @@ -90,13 +89,13 @@ def test_list_data_connectors(self, request_mock): # Call the appropriate endpoint. data_connectors = disruptive.DataConnector.list_data_connectors( - project_id='project_id', + project_id="project_id", ) # Verify expected outgoing parameters in request. - url = disruptive.base_url+'/projects/project_id/dataconnectors' + url = disruptive.base_url + "/projects/project_id/dataconnectors" request_mock.assert_requested( - method='GET', + method="GET", url=url, ) @@ -115,40 +114,41 @@ def test_create_data_connector(self, request_mock): # Call DataConnector.configured_data_connector(). d = disruptive.DataConnector.create_data_connector( - project_id='c0md3pm0p7bet3vico8g', - display_name='my-new-dcon', - labels=['name', 'custom-label-01', 'custom_label-02'], + project_id="c0md3pm0p7bet3vico8g", + display_name="my-new-dcon", + labels=["name", "custom-label-01", "custom_label-02"], config=disruptive.DataConnector.HttpPushConfig( - url='https://584087e0a1fa.eu.ngrok.io/api/endpoint', - signature_secret='some-very-good-secret', + url="https://584087e0a1fa.eu.ngrok.io/api/endpoint", + signature_secret="some-very-good-secret", headers={ - 'another-header': 'header-contents', - 'some-header': 'abc123', - } - ) + "another-header": "header-contents", + "some-header": "abc123", + }, + ), ) # Verify expected outgoing parameters in request. # Especially the body is important here. url = disruptive.base_url - url += '/projects/c0md3pm0p7bet3vico8g/dataconnectors' + url += "/projects/c0md3pm0p7bet3vico8g/dataconnectors" request_mock.assert_requested( - method='POST', + method="POST", url=url, body={ - 'status': 'ACTIVE', - 'events': [], - 'labels': ['name', 'custom-label-01', 'custom_label-02'], - 'displayName': 'my-new-dcon', - 'type': 'HTTP_PUSH', - 'httpConfig': { - 'url': 'https://584087e0a1fa.eu.ngrok.io/api/endpoint', - 'signatureSecret': 'some-very-good-secret', - 'headers': { - 'another-header': 'header-contents', - 'some-header': 'abc123'} - } - } + "status": "ACTIVE", + "events": [], + "labels": ["name", "custom-label-01", "custom_label-02"], + "displayName": "my-new-dcon", + "type": "HTTP_PUSH", + "httpConfig": { + "url": "https://584087e0a1fa.eu.ngrok.io/api/endpoint", + "signatureSecret": "some-very-good-secret", + "headers": { + "another-header": "header-contents", + "some-header": "abc123", + }, + }, + }, ) # Assert single request sent. @@ -166,39 +166,39 @@ def test_create_data_connector_http_push_config(self, request_mock): # Mock the DataConnector constructor to do nothing as # the response is not relevant to this test. 
- with patch('disruptive.DataConnector.__init__') as init_mock: + with patch("disruptive.DataConnector.__init__") as init_mock: # Do nothing and return None. init_mock.return_value = None # Call DataConnector.create_data_connector for type HTTP_PUSH. disruptive.DataConnector.create_data_connector( - project_id='project_id', + project_id="project_id", config=disruptive.DataConnector.HttpPushConfig( - url='some-url', - signature_secret='some-secret', - headers={'name': 'value'}, + url="some-url", + signature_secret="some-secret", + headers={"name": "value"}, ), ) # Verify expected outgoing parameters in request. # Especially the body is important here. - url = disruptive.base_url+'/projects/project_id/dataconnectors' + url = disruptive.base_url + "/projects/project_id/dataconnectors" request_mock.assert_requested( - method='POST', + method="POST", url=url, body={ - 'status': 'ACTIVE', - 'events': [], - 'labels': [], - 'type': 'HTTP_PUSH', - 'httpConfig': { - 'url': 'some-url', - 'signatureSecret': 'some-secret', - 'headers': { - 'name': 'value', - } - } - } + "status": "ACTIVE", + "events": [], + "labels": [], + "type": "HTTP_PUSH", + "httpConfig": { + "url": "some-url", + "signatureSecret": "some-secret", + "headers": { + "name": "value", + }, + }, + }, ) def test_update_data_connector(self, request_mock): @@ -208,44 +208,44 @@ def test_update_data_connector(self, request_mock): # Call DataConnector.configured_data_connector(). d = disruptive.DataConnector.update_data_connector( - data_connector_id='c16pegipidie7lltrefg', - project_id='c0md3pm0p7bet3vico8g', - display_name='my-new-dcon', - labels=['name', 'custom-label-01', 'custom_label-02'], - status='ACTIVE', - event_types=['touch', 'temperature', 'objectPresent'], + data_connector_id="c16pegipidie7lltrefg", + project_id="c0md3pm0p7bet3vico8g", + display_name="my-new-dcon", + labels=["name", "custom-label-01", "custom_label-02"], + status="ACTIVE", + event_types=["touch", "temperature", "objectPresent"], config=disruptive.DataConnector.HttpPushConfig( - url='https://584087e0a1fa.eu.ngrok.io/api/endpoint', - signature_secret='some-very-good-secret', + url="https://584087e0a1fa.eu.ngrok.io/api/endpoint", + signature_secret="some-very-good-secret", headers={ - 'another-header': 'header-contents', - 'some-header': 'abc123', - } - ) + "another-header": "header-contents", + "some-header": "abc123", + }, + ), ) # Verify expected outgoing parameters in request. # Especially the body is important here. 
url = disruptive.base_url - url += '/projects/c0md3pm0p7bet3vico8g' - url += '/dataconnectors/c16pegipidie7lltrefg' + url += "/projects/c0md3pm0p7bet3vico8g" + url += "/dataconnectors/c16pegipidie7lltrefg" request_mock.assert_requested( - method='PATCH', + method="PATCH", url=url, body={ - 'displayName': 'my-new-dcon', - 'status': 'ACTIVE', - 'events': ['touch', 'temperature', 'objectPresent'], - 'labels': ['name', 'custom-label-01', 'custom_label-02'], - 'httpConfig': { - 'url': 'https://584087e0a1fa.eu.ngrok.io/api/endpoint', - 'signatureSecret': 'some-very-good-secret', - 'headers': { - 'another-header': 'header-contents', - 'some-header': 'abc123', - } - } - } + "displayName": "my-new-dcon", + "status": "ACTIVE", + "events": ["touch", "temperature", "objectPresent"], + "labels": ["name", "custom-label-01", "custom_label-02"], + "httpConfig": { + "url": "https://584087e0a1fa.eu.ngrok.io/api/endpoint", + "signatureSecret": "some-very-good-secret", + "headers": { + "another-header": "header-contents", + "some-header": "abc123", + }, + }, + }, ) # Assert single request sent. @@ -264,22 +264,22 @@ def test_update_data_connector_change_nothing(self, request_mock): # Mock the DataConnector constructor to do nothing as # the response is not relevant to this test. - with patch('disruptive.DataConnector.__init__') as init_mock: + with patch("disruptive.DataConnector.__init__") as init_mock: # Do nothing and return None. init_mock.return_value = None # Call DataConnector.configured_data_connector() with only # required parameters, basically asking the API to change nothing. disruptive.DataConnector.update_data_connector( - data_connector_id='data_connector_id', - project_id='project_id', + data_connector_id="data_connector_id", + project_id="project_id", ) # Verify that all optional parameters are not included in the body. url = disruptive.base_url - url += '/projects/project_id/dataconnectors/data_connector_id' + url += "/projects/project_id/dataconnectors/data_connector_id" request_mock.assert_requested( - method='PATCH', + method="PATCH", url=url, body={}, ) @@ -287,15 +287,15 @@ def test_update_data_connector_change_nothing(self, request_mock): def test_delete_data_connector(self, request_mock): # Call the DataConnector.delete_data_connector() method. d = disruptive.DataConnector.delete_data_connector( - data_connector_id='data_connector_id', - project_id='project_id', + data_connector_id="data_connector_id", + project_id="project_id", ) # Verify expected outgoing parameters in request. url = disruptive.base_url - url += '/projects/project_id/dataconnectors/data_connector_id' + url += "/projects/project_id/dataconnectors/data_connector_id" request_mock.assert_requested( - method='DELETE', + method="DELETE", url=url, ) @@ -311,15 +311,15 @@ def test_get_metrics(self, request_mock): # Call DataConnector.get_metrics. m = disruptive.DataConnector.get_metrics( - data_connector_id='data_connector_id', - project_id='project_id', + data_connector_id="data_connector_id", + project_id="project_id", ) # Verify expected outgoing parameters in request. url = disruptive.base_url - url += '/projects/project_id/dataconnectors/data_connector_id:metrics' + url += "/projects/project_id/dataconnectors/data_connector_id:metrics" request_mock.assert_requested( - method='GET', + method="GET", url=url, ) @@ -332,15 +332,15 @@ def test_get_metrics(self, request_mock): def test_sync_data_connector(self, request_mock): # Call DataConnector.sync_data_connector. 
m = disruptive.DataConnector.sync_data_connector( - data_connector_id='data_connector_id', - project_id='project_id', + data_connector_id="data_connector_id", + project_id="project_id", ) # Verify expected outgoing parameters in request. url = disruptive.base_url - url += '/projects/project_id/dataconnectors/data_connector_id:sync' + url += "/projects/project_id/dataconnectors/data_connector_id:sync" request_mock.assert_requested( - method='POST', + method="POST", url=url, ) @@ -362,18 +362,18 @@ def test_http_push_config_inbound(self, request_mock): # Call the appropriate endpoint. d = disruptive.DataConnector.get_data_connector( - data_connector_id='c16eegpdidie7lltpefg', - project_id='c0md3mm0c7pet3vico8g', + data_connector_id="c16eegpdidie7lltpefg", + project_id="c0md3mm0c7pet3vico8g", ) # Assert type and config instance. - assert d.data_connector_type == 'HTTP_PUSH' + assert d.data_connector_type == "HTTP_PUSH" assert isinstance(d.config, disruptive.DataConnector.HttpPushConfig) # Assert HttpPush attributes are set properly. - assert d.config.url == r['httpConfig']['url'] - assert d.config.signature_secret == r['httpConfig']['signatureSecret'] - assert d.config.headers == r['httpConfig']['headers'] + assert d.config.url == r["httpConfig"]["url"] + assert d.config.signature_secret == r["httpConfig"]["signatureSecret"] + assert d.config.headers == r["httpConfig"]["headers"] def test_http_push_config_outbound(self): """ @@ -384,26 +384,26 @@ def test_http_push_config_outbound(self): # Construct a HttpPush object. config = disruptive.DataConnector.HttpPushConfig( - url='some-url', - signature_secret='some-secret', + url="some-url", + signature_secret="some-secret", headers={ - 'h1': 'v1', - 'h2': 'v2', + "h1": "v1", + "h2": "v2", }, ) # Verify type attribute is correct. - assert config.data_connector_type == 'HTTP_PUSH' + assert config.data_connector_type == "HTTP_PUSH" # Test that _to_dict() method returns expected format. expected = { - 'url': 'some-url', - 'signatureSecret': 'some-secret', - 'headers': { - 'h1': 'v1', - 'h2': 'v2', - } + "url": "some-url", + "signatureSecret": "some-secret", + "headers": { + "h1": "v1", + "h2": "v2", + }, } key, value = config._to_dict() - assert key == 'httpConfig' + assert key == "httpConfig" assert value == expected diff --git a/tests/test_device.py b/tests/test_device.py index 46f2eeb..6959350 100644 --- a/tests/test_device.py +++ b/tests/test_device.py @@ -6,15 +6,14 @@ import tests.api_responses as dtapiresponses -class TestDevice(): - +class TestDevice: def test_repr(self, request_mock): # Update the response data with device data. res = dtapiresponses.touch_sensor request_mock.json = res # Fetch a device. - x = disruptive.Device.get_device('device_id', 'project_id') + x = disruptive.Device.get_device("device_id", "project_id") # Evaluate __repr__ function and compare copy. y = eval(repr(x)) @@ -26,23 +25,23 @@ def test_unpack(self, request_mock): request_mock.json = res # Call the appropriate endpoint. - d = disruptive.Device.get_device('device_id', 'project_id') + d = disruptive.Device.get_device("device_id", "project_id") # Assert attributes unpacked correctly. - assert d.device_id == res['name'].split('/')[-1] - assert d.device_type == res['type'] + assert d.device_id == res["name"].split("/")[-1] + assert d.device_type == res["type"] def test_get_device(self, request_mock): # Update the response data with device data. request_mock.json = dtapiresponses.touch_sensor # Call Device.get_device() method. 
- d = disruptive.Device.get_device('device_id', 'project_id') + d = disruptive.Device.get_device("device_id", "project_id") # Verify expected outgoing parameters in request. request_mock.assert_requested( - method='GET', - url=disruptive.base_url+'/projects/project_id/devices/device_id', + method="GET", + url=disruptive.base_url + "/projects/project_id/devices/device_id", ) # Assert single request sent. @@ -56,12 +55,12 @@ def test_get_device_project_wildcard(self, request_mock): request_mock.json = dtapiresponses.touch_sensor # Call Device.get_device() method without providing project_id. - d = disruptive.Device.get_device(device_id='device_id') + d = disruptive.Device.get_device(device_id="device_id") # Verify expected outgoing parameters in request. request_mock.assert_requested( - method='GET', - url=disruptive.base_url+'/projects/-/devices/device_id', + method="GET", + url=disruptive.base_url + "/projects/-/devices/device_id", ) # Assert single request sent. @@ -75,12 +74,12 @@ def test_list_devices(self, request_mock): request_mock.json = dtapiresponses.paginated_device_response # Call Device.list_devices() method. - devices = disruptive.Device.list_devices('project_id') + devices = disruptive.Device.list_devices("project_id") # Verify expected outgoing parameters in request. - url = disruptive.base_url+'/projects/project_id/devices' + url = disruptive.base_url + "/projects/project_id/devices" request_mock.assert_requested( - method='GET', + method="GET", url=url, ) @@ -100,26 +99,26 @@ def test_list_devices_optionals(self, request_mock): # Call Device.list_devices() method. devices = disruptive.Device.list_devices( - project_id='project_id', - query='some_filter', - device_ids=['device1, device2'], - device_types=['temperature'], - label_filters={'key': 'value'}, - order_by='reported.temperature.value', + project_id="project_id", + query="some_filter", + device_ids=["device1, device2"], + device_types=["temperature"], + label_filters={"key": "value"}, + order_by="reported.temperature.value", ) # Verify expected outgoing parameters in request. - url = disruptive.base_url+'/projects/project_id/devices' + url = disruptive.base_url + "/projects/project_id/devices" request_mock.assert_requested( - method='GET', + method="GET", url=url, params={ - 'query': 'some_filter', - 'device_ids': ['device1, device2'], - 'device_types': ['temperature'], - 'order_by': 'reported.temperature.value', - 'label_filters': ['key=value'], - } + "query": "some_filter", + "device_ids": ["device1, device2"], + "device_types": ["temperature"], + "order_by": "reported.temperature.value", + "label_filters": ["key=value"], + }, ) # Assert single request sent. @@ -138,29 +137,29 @@ def test_batch_update_labels(self, request_mock): # Call Device.batch_update_labels() method. d = disruptive.Device.batch_update_labels( - device_ids=['device_id1', 'device_id2', 'device_id3'], - project_id='project_id', + device_ids=["device_id1", "device_id2", "device_id3"], + project_id="project_id", set_labels={ - 'key1': 'value1', - 'key2': 'value2', + "key1": "value1", + "key2": "value2", }, - remove_labels=['remove-key'], + remove_labels=["remove-key"], ) # Verify expected outgoing parameters in request. 
- url = disruptive.base_url+'/projects/project_id/devices:batchUpdate' + url = disruptive.base_url + "/projects/project_id/devices:batchUpdate" request_mock.assert_requested( - method='POST', + method="POST", url=url, body={ - 'devices': [ - 'projects/project_id/devices/device_id1', - 'projects/project_id/devices/device_id2', - 'projects/project_id/devices/device_id3', + "devices": [ + "projects/project_id/devices/device_id1", + "projects/project_id/devices/device_id2", + "projects/project_id/devices/device_id3", ], - 'addLabels': {'key1': 'value1', 'key2': 'value2'}, - 'removeLabels': ['remove-key'], - } + "addLabels": {"key1": "value1", "key2": "value2"}, + "removeLabels": ["remove-key"], + }, ) # Assert single request sent. @@ -175,22 +174,22 @@ def test_set_label(self, request_mock): # Call Device.set_label() method. d = disruptive.Device.set_label( - device_id='device_id', - project_id='project_id', - key='key', - value='value', + device_id="device_id", + project_id="project_id", + key="key", + value="value", ) # Verify expected outgoing parameters in request. - url = disruptive.base_url+'/projects/project_id/devices:batchUpdate' + url = disruptive.base_url + "/projects/project_id/devices:batchUpdate" request_mock.assert_requested( - method='POST', + method="POST", url=url, body={ - 'devices': [ - 'projects/project_id/devices/device_id', + "devices": [ + "projects/project_id/devices/device_id", ], - 'addLabels': {'key': 'value'}, + "addLabels": {"key": "value"}, }, ) @@ -206,21 +205,21 @@ def test_remove_label(self, request_mock): # Call Device.remove_label() method. d = disruptive.Device.remove_label( - device_id='device_id', - project_id='project_id', - key='key', + device_id="device_id", + project_id="project_id", + key="key", ) # Verify expected outgoing parameters in request. - url = disruptive.base_url+'/projects/project_id/devices:batchUpdate' + url = disruptive.base_url + "/projects/project_id/devices:batchUpdate" request_mock.assert_requested( - method='POST', + method="POST", url=url, body={ - 'devices': [ - 'projects/project_id/devices/device_id', + "devices": [ + "projects/project_id/devices/device_id", ], - 'removeLabels': ['key'], + "removeLabels": ["key"], }, ) @@ -236,22 +235,22 @@ def test_transfer_devices(self, request_mock): # Call Device.remove_label() method. d = disruptive.Device.transfer_devices( - device_ids=['device_id1', 'device_id2'], - source_project_id='source_project', - target_project_id='target_project', + device_ids=["device_id1", "device_id2"], + source_project_id="source_project", + target_project_id="target_project", ) # Verify expected outgoing parameters in request. - url = disruptive.base_url+'/projects/target_project/devices:transfer' + url = disruptive.base_url + "/projects/target_project/devices:transfer" request_mock.assert_requested( - method='POST', + method="POST", url=url, body={ - 'devices': [ - 'projects/source_project/devices/device_id1', - 'projects/source_project/devices/device_id2', + "devices": [ + "projects/source_project/devices/device_id1", + "projects/source_project/devices/device_id2", ], - } + }, ) # Assert single request sent. @@ -266,29 +265,29 @@ def test_transfer_devices_errors(self, request_mock): request_mock.json = dtapiresponses.transfer_device_errors # Define device ids to transfer. - good_ids = ['device_id1', 'device_id2'] - bad_ids = ['abc', '123'] + good_ids = ["device_id1", "device_id2"] + bad_ids = ["abc", "123"] # Call Device.remove_label() method. 
d = disruptive.Device.transfer_devices( - device_ids=good_ids+bad_ids, - source_project_id='source_project', - target_project_id='target_project', + device_ids=good_ids + bad_ids, + source_project_id="source_project", + target_project_id="target_project", ) # Verify expected outgoing parameters in request. - url = disruptive.base_url+'/projects/target_project/devices:transfer' + url = disruptive.base_url + "/projects/target_project/devices:transfer" request_mock.assert_requested( - method='POST', + method="POST", url=url, body={ - 'devices': [ - 'projects/source_project/devices/device_id1', - 'projects/source_project/devices/device_id2', - 'projects/source_project/devices/abc', - 'projects/source_project/devices/123', + "devices": [ + "projects/source_project/devices/device_id1", + "projects/source_project/devices/device_id2", + "projects/source_project/devices/abc", + "projects/source_project/devices/123", ], - } + }, ) # Assert single request sent. @@ -310,12 +309,12 @@ def test_reported_no_data(self, request_mock): request_mock.json = dtapiresponses.null_reported_sensor # Call the appropriate endpoint. - d = disruptive.Device.get_device('device_id', 'project_id') + d = disruptive.Device.get_device("device_id", "project_id") # Assert None for all reported datas. for key in dtevents._EVENTS_MAP._api_names: # Skip labelsChanged as it does not exist in reported. - if key == 'labelsChanged': + if key == "labelsChanged": continue attr = dtevents._EVENTS_MAP._api_names[key].attr_name @@ -326,7 +325,7 @@ def test_reported_touch_data(self, request_mock): request_mock.json = dtapiresponses.touch_sensor # Call the appropriate endpoint. - d = disruptive.Device.get_device('device_id', 'project_id') + d = disruptive.Device.get_device("device_id", "project_id") # Assert appropriate reported data instances. assert isinstance(d.reported.network_status, dtevents.NetworkStatus) @@ -338,9 +337,9 @@ def test_reported_unknown_data(self, request_mock): request_mock.json = dtapiresponses.unknown_reported_sensor # Mock the warning logger. - with patch('disruptive.logging.warning') as warning_mock: + with patch("disruptive.logging.warning") as warning_mock: # Call the appropriate endpoint. - disruptive.Device.get_device('device_id', 'project_id') + disruptive.Device.get_device("device_id", "project_id") assert warning_mock.call_count == 1 @@ -349,12 +348,12 @@ def test_missing_product_number(self, request_mock): request_mock.json = dtapiresponses.null_reported_sensor # Call Device.get_device() method. - d = disruptive.Device.get_device('device_id', 'project_id') + d = disruptive.Device.get_device("device_id", "project_id") # Verify expected outgoing parameters in request. request_mock.assert_requested( - method='GET', - url=disruptive.base_url+'/projects/project_id/devices/device_id', + method="GET", + url=disruptive.base_url + "/projects/project_id/devices/device_id", ) # Assert single request sent. @@ -364,7 +363,7 @@ def test_missing_product_number(self, request_mock): assert isinstance(d, disruptive.Device) # Assert empty string as product number. 
- assert d.product_number == '' + assert d.product_number == "" def test_reported_constructor(self): events = [ @@ -388,5 +387,5 @@ def test_reported_constructor(self): for event in events: device_dict = dtapiresponses.temperature_sensor - device_dict['reported'] = event['data'] + device_dict["reported"] = event["data"] _ = disruptive.Device(device_dict) diff --git a/tests/test_emulator.py b/tests/test_emulator.py index 8f57743..939af3f 100644 --- a/tests/test_emulator.py +++ b/tests/test_emulator.py @@ -9,8 +9,7 @@ import tests.api_responses as dtapiresponses -class TestEmulator(): - +class TestEmulator: def test_create_device(self, request_mock): # Update the response data with device data. res = dtapiresponses.created_temperature_emulator @@ -18,23 +17,23 @@ def test_create_device(self, request_mock): # Call Emulator.create_device() method. d = dt.Emulator.create_device( - project_id='project_id', - device_type='temperature', - display_name='new-device', - labels={'key': 'value'}, + project_id="project_id", + device_type="temperature", + display_name="new-device", + labels={"key": "value"}, ) # Verify expected outgoing parameters in request. request_mock.assert_requested( - method='POST', - url=dt.emulator_base_url+'/projects/project_id/devices', + method="POST", + url=dt.emulator_base_url + "/projects/project_id/devices", body={ - 'type': 'temperature', - 'labels': { - 'key': 'value', - 'name': 'new-device', + "type": "temperature", + "labels": { + "key": "value", + "name": "new-device", }, - } + }, ) # Assert single request sent. @@ -46,14 +45,15 @@ def test_create_device(self, request_mock): def test_delete_device(self, request_mock): # Call Emulator.delete_device() method. d = dt.Emulator.delete_device( - device_id='device_id', - project_id='project_id', + device_id="device_id", + project_id="project_id", ) # Verify expected outgoing parameters in request. request_mock.assert_requested( - method='DELETE', - url=dt.emulator_base_url+'/projects/project_id/devices/device_id', + method="DELETE", + url=dt.emulator_base_url + + "/projects/project_id/devices/device_id", ) # Assert single request sent. 
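(Aside, illustrative only and not part of this patch: the emulator tests above patch requests.request, so no traffic leaves the test process. Against a real project the same helpers are used roughly as sketched below; the project ID, device ID, and label values are placeholders, and the sketch assumes, as elsewhere in the SDK tests, that create_device returns a Device whose device_id can be reused.)

import disruptive as dt
from datetime import datetime

# Create an emulated temperature sensor, publish one synthetic event,
# then clean up. All identifiers here are hypothetical placeholders.
device = dt.Emulator.create_device(
    project_id="<project_id>",
    device_type="temperature",
    display_name="my-emulated-sensor",
    labels={"room": "lab"},
)
dt.Emulator.publish_event(
    device_id=device.device_id,
    project_id="<project_id>",
    data=dt.events.Temperature(timestamp=datetime.now(), celsius=22.3),
)
dt.Emulator.delete_device(
    device_id=device.device_id,
    project_id="<project_id>",
)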
@@ -63,11 +63,11 @@ def test_delete_device(self, request_mock): assert d is None def test_publish_event(self, request_mock): - project_id = 'test_project' - device_id = 'test_device' + project_id = "test_project" + device_id = "test_device" url = dt.emulator_base_url - url += f'/projects/{project_id}/devices/{device_id}:publish' - args = {'device_id': device_id, 'project_id': project_id} + url += f"/projects/{project_id}/devices/{device_id}:publish" + args = {"device_id": device_id, "project_id": project_id} # datetime.datetime.utcnow() is deprecated in Python 3.12 if sys.version_info.major >= 3 and sys.version_info.minor >= 11: @@ -84,13 +84,13 @@ class TestCase: tests = [ TestCase( - name='touch', + name="touch", give_type=dt.events.TOUCH, give_data=dt.events.Touch(now), want_err=None, ), TestCase( - name='temperature w/o samples', + name="temperature w/o samples", give_type=dt.events.TEMPERATURE, give_data=dt.events.Temperature( timestamp=now, @@ -99,7 +99,7 @@ class TestCase: want_err=None, ), TestCase( - name='temperature w/ samples', + name="temperature w/ samples", give_type=dt.events.TEMPERATURE, give_data=dt.events.Temperature( timestamp=now, @@ -115,16 +115,16 @@ class TestCase: want_err=None, ), TestCase( - name='object present', + name="object present", give_type=dt.events.OBJECT_PRESENT, give_data=dt.events.ObjectPresent( timestamp=now, - state='PRESENT', + state="PRESENT", ), want_err=None, ), TestCase( - name='humidity', + name="humidity", give_type=dt.events.HUMIDITY, give_data=dt.events.Humidity( timestamp=now, @@ -134,7 +134,7 @@ class TestCase: want_err=None, ), TestCase( - name='object present count', + name="object present count", give_type=dt.events.OBJECT_PRESENT_COUNT, give_data=dt.events.ObjectPresentCount( timestamp=now, @@ -143,7 +143,7 @@ class TestCase: want_err=None, ), TestCase( - name='touch count', + name="touch count", give_type=dt.events.TOUCH_COUNT, give_data=dt.events.TouchCount( timestamp=now, @@ -152,30 +152,30 @@ class TestCase: want_err=None, ), TestCase( - name='water present', + name="water present", give_type=dt.events.WATER_PRESENT, give_data=dt.events.WaterPresent( timestamp=now, - state='PRESENT', + state="PRESENT", ), want_err=None, ), TestCase( - name='network status', + name="network status", give_type=dt.events.NETWORK_STATUS, give_data=dt.events.NetworkStatus( timestamp=now, signal_strength=98, rssi=-66, - transmission_mode='LOW_POWER_STANDARD_MODE', + transmission_mode="LOW_POWER_STANDARD_MODE", cloud_connectors=[ dt.events.NetworkStatusCloudConnector( - device_id='ccon 1', + device_id="ccon 1", signal_strength=99, rssi=-77, ), dt.events.NetworkStatusCloudConnector( - device_id='ccon 2', + device_id="ccon 2", signal_strength=88, rssi=-61, ), @@ -184,7 +184,7 @@ class TestCase: want_err=None, ), TestCase( - name='battery status', + name="battery status", give_type=dt.events.BATTERY_STATUS, give_data=dt.events.BatteryStatus( timestamp=now, @@ -193,38 +193,38 @@ class TestCase: want_err=None, ), TestCase( - name='labels changed', + name="labels changed", give_type=dt.events.LABELS_CHANGED, give_data=dt.events.LabelsChanged( timestamp=now, - added={'a': 1}, - modified={'b': 2}, - removed=['c'], + added={"a": 1}, + modified={"b": 2}, + removed=["c"], ), want_err=None, ), TestCase( - name='connection status', + name="connection status", give_type=dt.events.CONNECTION_STATUS, give_data=dt.events.ConnectionStatus( timestamp=now, - connection='ETHERNET', - available=['ETHERNET', 'CELLULAR'], + connection="ETHERNET", + available=["ETHERNET", 
"CELLULAR"], ), want_err=None, ), TestCase( - name='ethernet status', + name="ethernet status", give_type=dt.events.ETHERNET_STATUS, give_data=dt.events.EthernetStatus( timestamp=now, - mac_address='abc', - ip_address='123', + mac_address="abc", + ip_address="123", ), want_err=None, ), TestCase( - name='cellular status', + name="cellular status", give_type=dt.events.CELLULAR_STATUS, give_data=dt.events.CellularStatus( timestamp=now, @@ -233,7 +233,7 @@ class TestCase: want_err=None, ), TestCase( - name='co2', + name="co2", give_type=dt.events.CO2, give_data=dt.events.Co2( timestamp=now, @@ -242,7 +242,7 @@ class TestCase: want_err=None, ), TestCase( - name='pressure', + name="pressure", give_type=dt.events.PRESSURE, give_data=dt.events.Pressure( timestamp=now, @@ -251,29 +251,29 @@ class TestCase: want_err=None, ), TestCase( - name='motion', + name="motion", give_type=dt.events.MOTION, give_data=dt.events.Motion( timestamp=now, - state='NO_MOTION_DETECTED', + state="NO_MOTION_DETECTED", ), want_err=None, ), TestCase( - name='contact', + name="contact", give_type=dt.events.CONTACT, give_data=dt.events.Contact( timestamp=now, - state='OPEN', + state="OPEN", ), want_err=None, ), TestCase( - name='probeWireStatus', + name="probeWireStatus", give_type=dt.events.PROBE_WIRE_STATUS, give_data=dt.events.ProbeWireStatus( timestamp=now, - state='THREE_WIRE', + state="THREE_WIRE", ), want_err=None, ), @@ -281,13 +281,13 @@ class TestCase: i = 0 for test in tests: - args['data'] = test.give_data + args["data"] = test.give_data if test.want_err is None: dt.Emulator.publish_event(**args) request_mock.assert_requested( - method='POST', + method="POST", url=url, body={test.give_type: test.give_data._raw}, ) diff --git a/tests/test_errors.py b/tests/test_errors.py index 1a684ca..56866b7 100644 --- a/tests/test_errors.py +++ b/tests/test_errors.py @@ -4,15 +4,14 @@ import disruptive.errors as errors -class TestResponseStatusCodes(): - +class TestResponseStatusCodes: def test_error_code_400(self, request_mock): # Set response status code to represent test. request_mock.status_code = 400 # Try to authenticate. This sends a POST request internally. with pytest.raises(errors.BadRequest): - dt.Device.get_device('', '') + dt.Device.get_device("", "") # Assert no retries. request_mock.assert_request_count(1) @@ -23,7 +22,7 @@ def test_error_code_401(self, request_mock): # Call the service, which will send a request to the server. with pytest.raises(errors.Unauthorized): - dt.Device.get_device('', '') + dt.Device.get_device("", "") # 401 should retry once. request_mock.assert_request_count(3) @@ -34,7 +33,7 @@ def test_error_code_403(self, request_mock): # Call the service, which will send a request to the server. with pytest.raises(errors.Forbidden): - dt.Device.get_device('', '') + dt.Device.get_device("", "") # Assert no retries. request_mock.assert_request_count(1) @@ -45,7 +44,7 @@ def test_error_code_404(self, request_mock): # Call the service, which will send a request to the server. with pytest.raises(errors.NotFound): - dt.Device.get_device('', '') + dt.Device.get_device("", "") # Assert no retries. request_mock.assert_request_count(1) @@ -56,7 +55,7 @@ def test_error_code_409(self, request_mock): # Call the service, which will send a request to the server. with pytest.raises(errors.Conflict): - dt.Device.get_device('', '') + dt.Device.get_device("", "") # Assert no retries. 
request_mock.assert_request_count(1) @@ -67,7 +66,7 @@ def test_error_code_429(self, request_mock): # Call the service, which will send a request to the server. with pytest.raises(errors.TooManyRequests): - dt.Device.get_device('', '') + dt.Device.get_device("", "") def test_error_code_500(self, request_mock): # Set response status code to represent test. @@ -75,7 +74,7 @@ def test_error_code_500(self, request_mock): # Call the service, which will send a request to the server. with pytest.raises(errors.InternalServerError): - dt.Device.get_device('', '') + dt.Device.get_device("", "") # Assert expected retry attempts. request_mock.assert_request_count(dt.request_attempts + 1) @@ -86,7 +85,7 @@ def test_error_code_503(self, request_mock): # Call the service, which will send a request to the server. with pytest.raises(errors.InternalServerError): - dt.Device.get_device('', '') + dt.Device.get_device("", "") # Assert expected retry attempts. request_mock.assert_request_count(dt.request_attempts + 1) @@ -97,7 +96,7 @@ def test_error_code_504(self, request_mock): # Call the service, which will send a request to the server. with pytest.raises(errors.InternalServerError): - dt.Device.get_device('', '') + dt.Device.get_device("", "") # Assert expected retry attempts. request_mock.assert_request_count(dt.request_attempts + 1) @@ -106,26 +105,26 @@ def test_server_error_group(self, request_mock): for code in [500, 503, 504]: request_mock.status_code = code with pytest.raises(errors.ServerError): - dt.Device.get_device('') + dt.Device.get_device("") def test_usage_error_group(self, request_mock): for code in [400, 401, 403, 404, 409, 429]: request_mock.status_code = code with pytest.raises(errors.UsageError): - dt.Device.get_device('') + dt.Device.get_device("") with pytest.raises(errors.UsageError): dt.EventHistory.list_events( - device_id='device_id', - project_id='project_id', - start_time='XXXX-01-01T00:00:00Z', + device_id="device_id", + project_id="project_id", + start_time="XXXX-01-01T00:00:00Z", ) old_value = dt.request_attempts dt.request_attempts = -1 with pytest.raises(errors.UsageError): - dt.Device.get_device('', '') + dt.Device.get_device("", "") dt.request_attempts = old_value with pytest.raises(errors.UsageError): - dt.Auth.service_account('', '', '') + dt.Auth.service_account("", "", "") diff --git a/tests/test_eventhistory.py b/tests/test_eventhistory.py index a1f6b44..8414cfa 100644 --- a/tests/test_eventhistory.py +++ b/tests/test_eventhistory.py @@ -8,8 +8,7 @@ import tests.api_responses as dtapiresponses -class TestEventHistory(): - +class TestEventHistory: def test_list_events(self, request_mock): # Update the response data with event history data. res = dtapiresponses.event_history_each_type @@ -17,24 +16,24 @@ def test_list_events(self, request_mock): # Call EventHistory.list_events() method. h = disruptive.EventHistory.list_events( - device_id='device_id', - project_id='project_id', - event_types=['temperature', 'touch'], - start_time='1970-01-01T00:00:00Z', - end_time='1970-01-02T00:00:00Z', + device_id="device_id", + project_id="project_id", + event_types=["temperature", "touch"], + start_time="1970-01-01T00:00:00Z", + end_time="1970-01-02T00:00:00Z", ) # Verify expected outgoing parameters in request. 
url = disruptive.base_url - url += '/projects/project_id/devices/device_id/events' + url += "/projects/project_id/devices/device_id/events" request_mock.assert_requested( - method='GET', + method="GET", url=url, params={ - 'eventTypes': ['temperature', 'touch'], - 'startTime': '1970-01-01T00:00:00Z', - 'endTime': '1970-01-02T00:00:00Z', - } + "eventTypes": ["temperature", "touch"], + "startTime": "1970-01-01T00:00:00Z", + "endTime": "1970-01-02T00:00:00Z", + }, ) # Assert single request sent. @@ -46,7 +45,7 @@ def test_list_events(self, request_mock): assert isinstance(e, Event) def test_to_pandas_polars(self, request_mock): - cols = ['device_id', 'event_id', 'event_type'] + cols = ["device_id", "event_id", "event_type"] @dataclass class TestCase: @@ -59,68 +58,81 @@ class TestCase: tests = [ TestCase( - name='none installed', + name="none installed", pandas_installed=False, polars_installed=False, - give_events=disruptive.EventHistory([ - disruptive.events.Event(dtapiresponses.touch_event), - ]), - want_cols=cols + ['update_time'], + give_events=disruptive.EventHistory( + [ + disruptive.events.Event(dtapiresponses.touch_event), + ] + ), + want_cols=cols + ["update_time"], want_len=1, ), TestCase( - name='pandas installed', + name="pandas installed", pandas_installed=True, polars_installed=False, - give_events=disruptive.EventHistory([ - disruptive.events.Event(dtapiresponses.touch_event), - ]), - want_cols=cols + ['update_time'], + give_events=disruptive.EventHistory( + [ + disruptive.events.Event(dtapiresponses.touch_event), + ] + ), + want_cols=cols + ["update_time"], want_len=1, ), TestCase( - name='none installed', + name="none installed", pandas_installed=False, polars_installed=False, - give_events=disruptive.EventHistory([ - disruptive.events.Event(dtapiresponses.touch_event), - ]), + give_events=disruptive.EventHistory( + [ + disruptive.events.Event(dtapiresponses.touch_event), + ] + ), want_cols=cols, want_len=0, ), TestCase( - name='no events in response', + name="no events in response", pandas_installed=True, polars_installed=True, - give_events=disruptive.EventHistory([ - ]), + give_events=disruptive.EventHistory([]), want_cols=[], want_len=0, ), TestCase( - name='touch events', + name="touch events", pandas_installed=True, polars_installed=True, - give_events=disruptive.EventHistory([ - disruptive.events.Event(dtapiresponses.touch_event), - disruptive.events.Event(dtapiresponses.touch_event), - disruptive.events.Event(dtapiresponses.touch_event), - ]), - want_cols=cols + ['update_time'], + give_events=disruptive.EventHistory( + [ + disruptive.events.Event(dtapiresponses.touch_event), + disruptive.events.Event(dtapiresponses.touch_event), + disruptive.events.Event(dtapiresponses.touch_event), + ] + ), + want_cols=cols + ["update_time"], want_len=3, ), TestCase( - name='touch + temperature events', + name="touch + temperature events", pandas_installed=True, polars_installed=True, - give_events=disruptive.EventHistory([ - disruptive.events.Event(dtapiresponses.touch_event), - disruptive.events.Event(dtapiresponses.touch_event), - disruptive.events.Event(dtapiresponses.temperature_event), - disruptive.events.Event(dtapiresponses.temperature_event), - disruptive.events.Event(dtapiresponses.touch_event), - ]), - want_cols=cols + ['update_time', 'sample_time', 'value'], + give_events=disruptive.EventHistory( + [ + disruptive.events.Event(dtapiresponses.touch_event), + disruptive.events.Event(dtapiresponses.touch_event), + disruptive.events.Event( + dtapiresponses.temperature_event + 
), + disruptive.events.Event( + dtapiresponses.temperature_event + ), + disruptive.events.Event(dtapiresponses.touch_event), + ] + ), + want_cols=cols + ["update_time", "sample_time", "value"], want_len=5, ), ] @@ -133,7 +145,7 @@ class TestCase: for col in test.want_cols: assert col in df_pandas.columns else: - patch = 'builtins.__import__' + patch = "builtins.__import__" with mock.patch(patch, side_effect=ModuleNotFoundError): with pytest.raises(ModuleNotFoundError): test.give_events.to_pandas() @@ -145,7 +157,7 @@ class TestCase: for col in test.want_cols: assert col in df_polars.columns else: - patch = 'builtins.__import__' + patch = "builtins.__import__" with mock.patch(patch, side_effect=ModuleNotFoundError): with pytest.raises(ModuleNotFoundError): test.give_events.to_polars() diff --git a/tests/test_events.py b/tests/test_events.py index e3b374c..1640b50 100644 --- a/tests/test_events.py +++ b/tests/test_events.py @@ -4,8 +4,7 @@ import disruptive -class TestEvents(): - +class TestEvents: def test_touch(self): x = disruptive.events.Touch( timestamp=datetime.now(), @@ -26,7 +25,7 @@ def test_temperature(self): def test_object_present(self): x = disruptive.events.ObjectPresent( - state='PRESENT', + state="PRESENT", timestamp=datetime.now(), ) @@ -87,7 +86,7 @@ def test_touch_count(self): def test_water_present(self): x = disruptive.events.WaterPresent( - state='NOT_PRESENT', + state="NOT_PRESENT", timestamp=datetime.now(), ) @@ -98,10 +97,10 @@ def test_network_status(self): x = disruptive.events.NetworkStatus( signal_strength=73, rssi=22, - transmission_mode='LOW_POWER_STANDARD_MODE', + transmission_mode="LOW_POWER_STANDARD_MODE", cloud_connectors=[ disruptive.events.NetworkStatusCloudConnector( - device_id='123', + device_id="123", signal_strength=73, rssi=22, ), @@ -123,9 +122,9 @@ def test_battery_status(self): def test_labels_changed(self): x = disruptive.events.LabelsChanged( - added={'key1': 'value1', 'key2': 'value2'}, - modified={'key3': 'value3'}, - removed=['key4'], + added={"key1": "value1", "key2": "value2"}, + modified={"key3": "value3"}, + removed=["key4"], timestamp=datetime.now(), ) @@ -134,8 +133,8 @@ def test_labels_changed(self): def test_connection_status(self): x = disruptive.events.ConnectionStatus( - connection='ETHERNET', - available=['ETHERNET', 'CELLULAR'], + connection="ETHERNET", + available=["ETHERNET", "CELLULAR"], timestamp=datetime.now(), ) @@ -144,8 +143,8 @@ def test_connection_status(self): def test_ethernet_status(self): x = disruptive.events.EthernetStatus( - mac_address='123', - ip_address='abc', + mac_address="123", + ip_address="abc", timestamp=datetime.now(), ) @@ -181,7 +180,7 @@ def test_pressure(self): def test_motion(self): x = disruptive.events.Motion( - state='NO_MOTION_DETECTED', + state="NO_MOTION_DETECTED", timestamp=datetime.now(), ) @@ -190,9 +189,9 @@ def test_motion(self): def test_desk_occupancy(self): x = disruptive.events.DeskOccupancy( - state='OCCUPIED', + state="OCCUPIED", timestamp=datetime.now(), - remarks=['INCOMPLETE_DATA'], + remarks=["INCOMPLETE_DATA"], ) y = eval(repr(x)) @@ -207,8 +206,8 @@ class TestCase: timestamp: datetime testcases = [ - TestCase(state='OPEN', timestamp=now), - TestCase(state='CLOSED', timestamp=now), + TestCase(state="OPEN", timestamp=now), + TestCase(state="CLOSED", timestamp=now), ] for testcase in testcases: @@ -229,10 +228,10 @@ class TestCase: timestamp: datetime testcases = [ - TestCase(state='INVALID_WIRE_CONFIGURATION', timestamp=now), - TestCase(state='TWO_WIRE', timestamp=now), - 
TestCase(state='THREE_WIRE', timestamp=now), - TestCase(state='FOUR_WIRE', timestamp=now), + TestCase(state="INVALID_WIRE_CONFIGURATION", timestamp=now), + TestCase(state="TWO_WIRE", timestamp=now), + TestCase(state="THREE_WIRE", timestamp=now), + TestCase(state="FOUR_WIRE", timestamp=now), ] for testcase in testcases: diff --git a/tests/test_logging.py b/tests/test_logging.py index 2912273..c1c8eb6 100644 --- a/tests/test_logging.py +++ b/tests/test_logging.py @@ -6,69 +6,74 @@ import disruptive.errors as dterrors -class TestLogging(): - - def _check_level_called(self, msg, debug=True, info=True, - warning=True, error=True, critical=True): - - with patch('disruptive.logging._fmt_log') as log_mock: +class TestLogging: + def _check_level_called( + self, + msg, + debug=True, + info=True, + warning=True, + error=True, + critical=True, + ): + with patch("disruptive.logging._fmt_log") as log_mock: dtlog.debug(msg) if debug: - log_mock.assert_called_with(msg, 'DEBUG') + log_mock.assert_called_with(msg, "DEBUG") else: assert log_mock.call_count == 0 - with patch('disruptive.logging._fmt_log') as log_mock: + with patch("disruptive.logging._fmt_log") as log_mock: dtlog.info(msg) if info: - log_mock.assert_called_with(msg, 'INFO') + log_mock.assert_called_with(msg, "INFO") else: assert log_mock.call_count == 0 - with patch('disruptive.logging._fmt_log') as log_mock: + with patch("disruptive.logging._fmt_log") as log_mock: dtlog.warning(msg) if warning: - log_mock.assert_called_with(msg, 'WARNING') + log_mock.assert_called_with(msg, "WARNING") else: assert log_mock.call_count == 0 - with patch('disruptive.logging._fmt_log') as log_mock: + with patch("disruptive.logging._fmt_log") as log_mock: dtlog.error(msg) if error: - log_mock.assert_called_with(msg, 'ERROR') + log_mock.assert_called_with(msg, "ERROR") else: assert log_mock.call_count == 0 - with patch('disruptive.logging._fmt_log') as log_mock: + with patch("disruptive.logging._fmt_log") as log_mock: dtlog.critical(msg) if critical: - log_mock.assert_called_with(msg, 'CRITICAL') + log_mock.assert_called_with(msg, "CRITICAL") else: assert log_mock.call_count == 0 def test_flag_debug(self): - disruptive.log_level = 'debug' - self._check_level_called('Test message.') + disruptive.log_level = "debug" + self._check_level_called("Test message.") disruptive.log_level = None def test_flag_info(self): - disruptive.log_level = 'info' - self._check_level_called('Test message.', debug=False) + disruptive.log_level = "info" + self._check_level_called("Test message.", debug=False) disruptive.log_level = None def test_flag_warning(self): - disruptive.log_level = 'warning' + disruptive.log_level = "warning" self._check_level_called( - msg='Test message.', + msg="Test message.", debug=False, info=False, ) disruptive.log_level = None def test_flag_error(self): - disruptive.log_level = 'error' + disruptive.log_level = "error" self._check_level_called( - msg='Test message.', + msg="Test message.", debug=False, info=False, warning=False, @@ -76,9 +81,9 @@ def test_flag_error(self): disruptive.log_level = None def test_flag_critical(self): - disruptive.log_level = 'critical' + disruptive.log_level = "critical" self._check_level_called( - msg='Test message.', + msg="Test message.", debug=False, info=False, warning=False, @@ -89,7 +94,7 @@ def test_flag_critical(self): def test_case_insensitive(self): disruptive.log_level = "CRITICAL" self._check_level_called( - msg='Test message.', + msg="Test message.", debug=False, info=False, warning=False, @@ -101,7 +106,7 @@ def 
test_invalid_level_reset(self): disruptive.log_level = "SOME_INVALID_STRING" with pytest.raises(dterrors.ConfigurationError): self._check_level_called( - msg='Test message.', + msg="Test message.", debug=False, info=False, warning=False, @@ -110,6 +115,6 @@ def test_invalid_level_reset(self): ) # Log level should be reset to default. - assert disruptive.log_level == 'info' + assert disruptive.log_level == "info" disruptive.log_level = None diff --git a/tests/test_organization.py b/tests/test_organization.py index 28b9081..19acd54 100644 --- a/tests/test_organization.py +++ b/tests/test_organization.py @@ -2,8 +2,7 @@ import tests.api_responses as dtapiresponses -class TestOrganization(): - +class TestOrganization: def test_repr(self, request_mock): # Update the response data with organization data. res = dtapiresponses.organization @@ -11,7 +10,7 @@ def test_repr(self, request_mock): # Fetch an organization. x = disruptive.Organization.get_organization( - organization_id='organization_id', + organization_id="organization_id", ) # Evaluate __repr__ function and compare copy. @@ -24,23 +23,23 @@ def test_unpack(self, request_mock): request_mock.json = res # Call the appropriate endpoint. - o = disruptive.Organization.get_organization('organization_id') + o = disruptive.Organization.get_organization("organization_id") # Assert attributes unpacked correctly. - assert o.organization_id == res['name'].split('/')[-1] - assert o.display_name == res['displayName'] + assert o.organization_id == res["name"].split("/")[-1] + assert o.display_name == res["displayName"] def test_get_organization(self, request_mock): # Update the response data with organization data. request_mock.json = dtapiresponses.organization # Call the appropriate endpoint. - o = disruptive.Organization.get_organization('organization_id') + o = disruptive.Organization.get_organization("organization_id") # Verify request parameters. request_mock.assert_requested( - method='GET', - url=disruptive.base_url+'/organizations/organization_id', + method="GET", + url=disruptive.base_url + "/organizations/organization_id", ) # Assert single request sent. @@ -58,8 +57,8 @@ def test_list_organizations(self, request_mock): # Verify request parameters. request_mock.assert_requested( - method='GET', - url=disruptive.base_url+'/organizations', + method="GET", + url=disruptive.base_url + "/organizations", ) # Assert single request sent. @@ -74,12 +73,12 @@ def test_list_members(self, request_mock): request_mock.json = dtapiresponses.members # Call the appropriate endpoint - members = disruptive.Organization.list_members('org_id') + members = disruptive.Organization.list_members("org_id") # Verify request parameters. request_mock.assert_requested( - method='GET', - url=disruptive.base_url+'/organizations/org_id/members', + method="GET", + url=disruptive.base_url + "/organizations/org_id/members", ) # Assert single request sent. @@ -95,19 +94,19 @@ def test_add_member(self, request_mock): # Call the appropriate endpoint member = disruptive.Organization.add_member( - organization_id='org_id', - email='service_account_email@domain.com', - roles=['organization.admin'], + organization_id="org_id", + email="service_account_email@domain.com", + roles=["organization.admin"], ) # Verify request parameters. 
request_mock.assert_requested( - method='POST', - url=disruptive.base_url+'/organizations/org_id/members', + method="POST", + url=disruptive.base_url + "/organizations/org_id/members", body={ - 'roles': ['roles/organization.admin'], - 'email': 'service_account_email@domain.com', - } + "roles": ["roles/organization.admin"], + "email": "service_account_email@domain.com", + }, ) # Assert single request sent. @@ -122,14 +121,15 @@ def test_get_member(self, request_mock): # Call the appropriate endpoint member = disruptive.Organization.get_member( - organization_id='org_id', - member_id='member_id', + organization_id="org_id", + member_id="member_id", ) # Verify request parameters. request_mock.assert_requested( - method='GET', - url=disruptive.base_url+'/organizations/org_id/members/member_id', + method="GET", + url=disruptive.base_url + + "/organizations/org_id/members/member_id", ) # Assert single request sent. @@ -144,14 +144,15 @@ def test_remove_member(self, request_mock): # Call the appropriate endpoint response = disruptive.Organization.remove_member( - organization_id='org_id', - member_id='member_id', + organization_id="org_id", + member_id="member_id", ) # Verify request parameters. request_mock.assert_requested( - method='DELETE', - url=disruptive.base_url+'/organizations/org_id/members/member_id', + method="DELETE", + url=disruptive.base_url + + "/organizations/org_id/members/member_id", ) # Assert single request sent. @@ -162,27 +163,28 @@ def test_remove_member(self, request_mock): def test_get_member_invite_url(self, request_mock): # Update the response with an email string. - res = {'inviteUrl': 'some-email@domain.com'} + res = {"inviteUrl": "some-email@domain.com"} request_mock.json = res # Call the appropriate endpoint response = disruptive.Organization.get_member_invite_url( - organization_id='org_id', - member_id='member_id', + organization_id="org_id", + member_id="member_id", ) # Verify request parameters. request_mock.assert_requested( - method='GET', - url=disruptive.base_url+'/organizations/org_id/members/' - + 'member_id:getInviteUrl', + method="GET", + url=disruptive.base_url + + "/organizations/org_id/members/" + + "member_id:getInviteUrl", ) # Assert single request sent. request_mock.assert_request_count(1) # Assert output matches response content. - assert response == res['inviteUrl'] + assert response == res["inviteUrl"] def test_list_permissions(self, request_mock): # Update the response with list of permissions. @@ -190,16 +192,16 @@ def test_list_permissions(self, request_mock): # Call the appropriate endpoint response = disruptive.Organization.list_permissions( - organization_id='org_id', + organization_id="org_id", ) # Assert output is of type list. - assert type(response) == list + assert isinstance(response, list) # Verify request parameters. request_mock.assert_requested( - method='GET', - url=disruptive.base_url+'/organizations/org_id/permissions' + method="GET", + url=disruptive.base_url + "/organizations/org_id/permissions", ) # Assert single request sent. @@ -207,4 +209,4 @@ def test_list_permissions(self, request_mock): # Assert only strings in output list. for permission in response: - assert type(permission) == str + assert isinstance(permission, str) diff --git a/tests/test_outputs.py b/tests/test_outputs.py index a21335e..d09001a 100644 --- a/tests/test_outputs.py +++ b/tests/test_outputs.py @@ -6,14 +6,13 @@ import tests.api_responses as dtapiresponses -class TestOutputs(): - +class TestOutputs: def test_str_dunder(self): # Fetch some events. 
history = dtapiresponses.event_history_each_type # Iterate events in history. - for event in history['events']: + for event in history["events"]: # Construct event object. obj = disruptive.events.Event(event) @@ -25,7 +24,7 @@ def test_repr_dunder_eval(self): history = dtapiresponses.event_history_each_type # Iterate events in history. - for event in history['events']: + for event in history["events"]: # Construct event object. obj = disruptive.events.Event(event) @@ -43,7 +42,7 @@ class TestCase: tests = [ TestCase( - name='success', + name="success", give_raw=dtapiresponses.user_member, ), ] @@ -53,14 +52,14 @@ class TestCase: raw = test.give_raw - assert member.member_id == raw['name'].split('/')[-1] - assert member.display_name == raw['displayName'] - assert member.email == raw['email'] - assert member.roles == [r.split('/')[-1] for r in raw['roles']] - assert member.status == raw['status'] - assert member.email == raw['email'] - assert member.account_type == raw['accountType'] - assert member.create_time == dttrans.to_datetime(raw['createTime']) + assert member.member_id == raw["name"].split("/")[-1] + assert member.display_name == raw["displayName"] + assert member.email == raw["email"] + assert member.roles == [r.split("/")[-1] for r in raw["roles"]] + assert member.status == raw["status"] + assert member.email == raw["email"] + assert member.account_type == raw["accountType"] + assert member.create_time == dttrans.to_datetime(raw["createTime"]) def test_raw_attribute(self, request_mock): @dataclass @@ -73,70 +72,70 @@ class TestCase: tests = [ TestCase( - name='data connector raw', + name="data connector raw", give_method=disruptive.DataConnector.get_data_connector, give_params={ - 'data_connector_id': '-', - 'project_id': '-', + "data_connector_id": "-", + "project_id": "-", }, give_req=dtapiresponses.simple_data_connector, want_err=None, ), TestCase( - name='device raw', + name="device raw", give_method=disruptive.Device.get_device, - give_params={'device_id': '-'}, + give_params={"device_id": "-"}, give_req=dtapiresponses.touch_sensor, want_err=None, ), TestCase( - name='organization raw', + name="organization raw", give_method=disruptive.Organization.get_organization, - give_params={'organization_id': '-'}, + give_params={"organization_id": "-"}, give_req=dtapiresponses.organization, want_err=None, ), TestCase( - name='member raw', + name="member raw", give_method=disruptive.Organization.get_member, give_params={ - 'member_id': '-', - 'organization_id': '-', + "member_id": "-", + "organization_id": "-", }, give_req=dtapiresponses.user_member, want_err=None, ), TestCase( - name='project raw', + name="project raw", give_method=disruptive.Project.get_project, - give_params={'project_id': '-'}, + give_params={"project_id": "-"}, give_req=dtapiresponses.small_project, want_err=None, ), TestCase( - name='role raw', + name="role raw", give_method=disruptive.Role.get_role, - give_params={'role': '-'}, + give_params={"role": "-"}, give_req=dtapiresponses.project_user_role, want_err=None, ), TestCase( - name='service account raw', + name="service account raw", give_method=disruptive.ServiceAccount.get_service_account, give_params={ - 'service_account_id': '-', - 'project_id': '-', + "service_account_id": "-", + "project_id": "-", }, give_req=dtapiresponses.service_account1, want_err=None, ), TestCase( - name='service account key raw', + name="service account key raw", give_method=disruptive.ServiceAccount.get_key, give_params={ - 'key_id': '-', - 'service_account_id': '-', - 
'project_id': '-', + "key_id": "-", + "service_account_id": "-", + "project_id": "-", }, give_req=dtapiresponses.key_without_secret, want_err=None, diff --git a/tests/test_project.py b/tests/test_project.py index f58e99f..e2edf1a 100644 --- a/tests/test_project.py +++ b/tests/test_project.py @@ -2,8 +2,7 @@ import tests.api_responses as dtapiresponses -class TestProject(): - +class TestProject: def test_repr(self, request_mock): # Update the response data with project data. res = dtapiresponses.small_project @@ -11,7 +10,7 @@ def test_repr(self, request_mock): # Fetch a project. x = disruptive.Project.get_project( - project_id='project_id', + project_id="project_id", ) # Evaluate __repr__ function and compare copy. @@ -24,27 +23,27 @@ def test_unpack(self, request_mock): request_mock.json = res # Call the appropriate endpoint. - p = disruptive.Project.get_project('project_id') + p = disruptive.Project.get_project("project_id") # Assert attributes unpacked correctly. - assert p.project_id == res['name'].split('/')[-1] - assert p.display_name == res['displayName'] - assert p.organization_id == res['organization'].split('/')[-1] - assert p.organization_display_name == res['organizationDisplayName'] - assert p.sensor_count == res['sensorCount'] - assert p.cloud_connector_count == res['cloudConnectorCount'] + assert p.project_id == res["name"].split("/")[-1] + assert p.display_name == res["displayName"] + assert p.organization_id == res["organization"].split("/")[-1] + assert p.organization_display_name == res["organizationDisplayName"] + assert p.sensor_count == res["sensorCount"] + assert p.cloud_connector_count == res["cloudConnectorCount"] def test_get_project(self, request_mock): # Update the response data with project data. request_mock.json = dtapiresponses.small_project # Call the appropriate endpoint. - p = disruptive.Project.get_project('project_id') + p = disruptive.Project.get_project("project_id") # Verify request parameters. request_mock.assert_requested( - method='GET', - url=disruptive.base_url+'/projects/project_id', + method="GET", + url=disruptive.base_url + "/projects/project_id", ) # Assert single request sent. @@ -62,8 +61,8 @@ def test_list_projects(self, request_mock): # Verify request parameters. request_mock.assert_requested( - method='GET', - url=disruptive.base_url+'/projects', + method="GET", + url=disruptive.base_url + "/projects", ) # Assert single request sent. @@ -78,13 +77,13 @@ def test_create_project(self, request_mock): request_mock.json = dtapiresponses.empty_project # Call the appropriate endpoint. - p = disruptive.Project.create_project('org', 'name') + p = disruptive.Project.create_project("org", "name") # Verify request parameters. request_mock.assert_requested( - method='POST', - url=disruptive.base_url+'/projects', - body={'organization': 'organizations/org', 'displayName': 'name'}, + method="POST", + url=disruptive.base_url + "/projects", + body={"organization": "organizations/org", "displayName": "name"}, ) # Assert single request sent. @@ -98,13 +97,13 @@ def test_update_project(self, request_mock): request_mock.json = dtapiresponses.empty_project # Call the appropriate endpoint. - output = disruptive.Project.update_project('project_id', 'new-name') + output = disruptive.Project.update_project("project_id", "new-name") # Verify request parameters. 
request_mock.assert_requested( - method='PATCH', - url=disruptive.base_url+'/projects/project_id', - body={'displayName': 'new-name'}, + method="PATCH", + url=disruptive.base_url + "/projects/project_id", + body={"displayName": "new-name"}, ) # Assert single request sent. @@ -115,12 +114,12 @@ def test_update_project(self, request_mock): def test_delete_project(self, request_mock): # Call the appropriate endpoint. - output = disruptive.Project.delete_project('project_id') + output = disruptive.Project.delete_project("project_id") # Verify request parameters. request_mock.assert_requested( - method='DELETE', - url=disruptive.base_url+'/projects/project_id', + method="DELETE", + url=disruptive.base_url + "/projects/project_id", ) # Assert single request sent. @@ -134,12 +133,12 @@ def test_list_members(self, request_mock): request_mock.json = dtapiresponses.members # Call the appropriate endpoint - members = disruptive.Project.list_members('project_id') + members = disruptive.Project.list_members("project_id") # Verify request parameters. request_mock.assert_requested( - method='GET', - url=disruptive.base_url+'/projects/project_id/members', + method="GET", + url=disruptive.base_url + "/projects/project_id/members", ) # Assert single request sent. @@ -156,19 +155,19 @@ def test_add_member(self, request_mock): # Call the appropriate endpoint member = disruptive.Project.add_member( - project_id='project_id', - email='service_account_email@domain.com', - roles=['project.developer'], + project_id="project_id", + email="service_account_email@domain.com", + roles=["project.developer"], ) # Verify request parameters. request_mock.assert_requested( - method='POST', - url=disruptive.base_url+'/projects/project_id/members', + method="POST", + url=disruptive.base_url + "/projects/project_id/members", body={ - 'roles': ['roles/project.developer'], - 'email': 'service_account_email@domain.com', - } + "roles": ["roles/project.developer"], + "email": "service_account_email@domain.com", + }, ) # Assert single request sent. @@ -183,14 +182,14 @@ def test_get_member(self, request_mock): # Call the appropriate endpoint member = disruptive.Project.get_member( - project_id='project_id', - member_id='member_id', + project_id="project_id", + member_id="member_id", ) # Verify request parameters. request_mock.assert_requested( - method='GET', - url=disruptive.base_url+'/projects/project_id/members/member_id', + method="GET", + url=disruptive.base_url + "/projects/project_id/members/member_id", ) # Assert single request sent. @@ -205,16 +204,16 @@ def test_update_member(self, request_mock): # Call the appropriate endpoint member = disruptive.Project.update_member( - project_id='project_id', - member_id='member_id', - roles=['project.developer'], + project_id="project_id", + member_id="member_id", + roles=["project.developer"], ) # Verify request parameters. request_mock.assert_requested( - method='PATCH', - url=disruptive.base_url+'/projects/project_id/members/member_id', - body={'roles': ['roles/project.developer']} + method="PATCH", + url=disruptive.base_url + "/projects/project_id/members/member_id", + body={"roles": ["roles/project.developer"]}, ) # Assert single request sent. @@ -229,14 +228,14 @@ def test_remove_member(self, request_mock): # Call the appropriate endpoint response = disruptive.Project.remove_member( - project_id='project_id', - member_id='member_id', + project_id="project_id", + member_id="member_id", ) # Verify request parameters. 
request_mock.assert_requested( - method='DELETE', - url=disruptive.base_url+'/projects/project_id/members/member_id', + method="DELETE", + url=disruptive.base_url + "/projects/project_id/members/member_id", ) # Assert single request sent. @@ -247,27 +246,28 @@ def test_remove_member(self, request_mock): def test_get_member_invite_url(self, request_mock): # Update the response with an email string. - res = {'inviteUrl': 'some-email@domain.com'} + res = {"inviteUrl": "some-email@domain.com"} request_mock.json = res # Call the appropriate endpoint response = disruptive.Project.get_member_invite_url( - project_id='project_id', - member_id='member_id', + project_id="project_id", + member_id="member_id", ) # Verify request parameters. request_mock.assert_requested( - method='GET', - url=disruptive.base_url+'/projects/project_id/members/' - + 'member_id:getInviteUrl', + method="GET", + url=disruptive.base_url + + "/projects/project_id/members/" + + "member_id:getInviteUrl", ) # Assert single request sent. request_mock.assert_request_count(1) # Assert output matches response content. - assert response == res['inviteUrl'] + assert response == res["inviteUrl"] def test_list_permissions(self, request_mock): # Update the response with list of permissions. @@ -275,16 +275,16 @@ def test_list_permissions(self, request_mock): # Call the appropriate endpoint response = disruptive.Project.list_permissions( - project_id='project_id', + project_id="project_id", ) # Assert output is of type list. - assert type(response) == list + assert isinstance(response, list) # Verify request parameters. request_mock.assert_requested( - method='GET', - url=disruptive.base_url+'/projects/project_id/permissions' + method="GET", + url=disruptive.base_url + "/projects/project_id/permissions", ) # Assert single request sent. @@ -292,4 +292,4 @@ def test_list_permissions(self, request_mock): # Assert only strings in output list. for permission in response: - assert type(permission) == str + assert isinstance(permission, str) diff --git a/tests/test_requests.py b/tests/test_requests.py index 577090b..7f2eed5 100644 --- a/tests/test_requests.py +++ b/tests/test_requests.py @@ -7,8 +7,7 @@ from tests.framework import RequestsReponseMock -class TestRequests(): - +class TestRequests: def test_first_recusion_depth_success(self, request_mock): def __patched_request(json, status_code, headers): return DTResponse(json, status_code, headers), None @@ -21,7 +20,7 @@ def __patched_request(json, status_code, headers): # using an iterable side_effect which advances each call. request_mock.request_patcher = request_mock._mocker.patch.object( DTRequest, - '_request_wrapper', + "_request_wrapper", side_effect=[ __patched_request(api_res, 200, {}), __patched_request({}, 500, {}), # <- This should not run. @@ -29,7 +28,7 @@ def __patched_request(json, status_code, headers): ], ) # Call disruptive.Device.get_device() to trigger the request chain. - device = disruptive.Device.get_device('project_id', 'device_id') + device = disruptive.Device.get_device("project_id", "device_id") # Verify that recursive loop executed only 1 time. request_mock.assert_request_count(1) @@ -49,7 +48,7 @@ def __patched_request(json, status_code, headers): # using an iterable side_effect which advances each call. 
request_mock.request_patcher = request_mock._mocker.patch.object( DTRequest, - '_request_wrapper', + "_request_wrapper", side_effect=[ __patched_request({}, 500, {}), __patched_request(api_res, 200, {}), @@ -57,7 +56,7 @@ def __patched_request(json, status_code, headers): ], ) # Call disruptive.Device.get_device() to trigger the request chain. - device = disruptive.Device.get_device('project_id', 'device_id') + device = disruptive.Device.get_device("project_id", "device_id") # Verify that recursive loop executed 2 times. request_mock.assert_request_count(2) @@ -77,7 +76,7 @@ def __patched_request(json, status_code, headers): # using an iterable side_effect which advances each call. request_mock.request_patcher = request_mock._mocker.patch.object( DTRequest, - '_request_wrapper', + "_request_wrapper", side_effect=[ __patched_request({}, 500, {}), __patched_request({}, 500, {}), @@ -85,7 +84,7 @@ def __patched_request(json, status_code, headers): ], ) # Call disruptive.Device.get_device() to trigger the request chain. - device = disruptive.Device.get_device('project_id', 'device_id') + device = disruptive.Device.get_device("project_id", "device_id") # Verify that recursive loop executed 3 times. request_mock.assert_request_count(3) @@ -114,12 +113,12 @@ def __patched_request(json, status_code, headers): # using an iterable side_effect which advances each call. request_mock.request_patcher = request_mock._mocker.patch.object( DTRequest, - '_request_wrapper', + "_request_wrapper", side_effect=side_effects, ) # Call disruptive.Device.get_device() to trigger the request chain. - device = disruptive.Device.get_device('project_id', 'device_id') + device = disruptive.Device.get_device("project_id", "device_id") # Verify recursive loop executed disruptive.request_attempts times. request_mock.assert_request_count(n) @@ -129,31 +128,31 @@ def __patched_request(json, status_code, headers): def test_method_propagation(self, request_mock): # Assert GET method propagates correctly. - DTRequest.get('/url') - request_mock.assert_requested('GET', disruptive.base_url+'/url') + DTRequest.get("/url") + request_mock.assert_requested("GET", disruptive.base_url + "/url") # Assert POST method propagates correctly. - DTRequest.post('/url') - request_mock.assert_requested('POST', disruptive.base_url+'/url') + DTRequest.post("/url") + request_mock.assert_requested("POST", disruptive.base_url + "/url") # Assert PATCH method propagates correctly. - DTRequest.patch('/url') - request_mock.assert_requested('PATCH', disruptive.base_url+'/url') + DTRequest.patch("/url") + request_mock.assert_requested("PATCH", disruptive.base_url + "/url") # Assert DELETE method propagates correctly. - DTRequest.delete('/url') - request_mock.assert_requested('DELETE', disruptive.base_url+'/url') + DTRequest.delete("/url") + request_mock.assert_requested("DELETE", disruptive.base_url + "/url") def test_pagination_early_exit(self, request_mock): # Create a response we will update the page-token off. def __res(page_token: str): return { - 'nextPageToken': page_token, - 'events': [ - history['events'][0], - history['events'][1], - history['events'][2], - ] + "nextPageToken": page_token, + "events": [ + history["events"][0], + history["events"][1], + history["events"][2], + ], } # Fetch some event history data. @@ -163,20 +162,20 @@ def __res(page_token: str): # The default one is constant, which we fix by # using an iterable side_effect which advances each call. 
request_mock.request_patcher = request_mock._mocker.patch( - 'requests.request', + "requests.request", side_effect=[ - RequestsReponseMock(__res('4'), 200, {}), - RequestsReponseMock(__res('3'), 200, {}), - RequestsReponseMock(__res(''), 200, {}), - RequestsReponseMock(__res('2'), 200, {}), # <- should not run - RequestsReponseMock(__res('1'), 200, {}), # <- should not run + RequestsReponseMock(__res("4"), 200, {}), + RequestsReponseMock(__res("3"), 200, {}), + RequestsReponseMock(__res(""), 200, {}), + RequestsReponseMock(__res("2"), 200, {}), # <- should not run + RequestsReponseMock(__res("1"), 200, {}), # <- should not run ], ) # Call eventhistory method which should paginate 3 times. _ = disruptive.EventHistory.list_events( - device_id='device_id', - project_id='project_id', + device_id="device_id", + project_id="project_id", ) # Verify it ran exactly 3 times. @@ -184,23 +183,23 @@ def __res(page_token: str): # The last request should have been made with page-token == '3'. url = disruptive.base_url - url += '/projects/project_id/devices/device_id/events' + url += "/projects/project_id/devices/device_id/events" request_mock.assert_requested( - method='GET', + method="GET", url=url, - params={'pageToken': '3'}, + params={"pageToken": "3"}, ) def test_pagination_max_depth(self, request_mock): # Create a response we will update the page-token off. def __res(page_token: str): return { - 'nextPageToken': page_token, - 'events': [ - history['events'][0], - history['events'][1], - history['events'][2], - ] + "nextPageToken": page_token, + "events": [ + history["events"][0], + history["events"][1], + history["events"][2], + ], } # Fetch some event history data. @@ -210,20 +209,20 @@ def __res(page_token: str): # The default one is constant, which we fix by # using an iterable side_effect which advances each call. request_mock.request_patcher = request_mock._mocker.patch( - 'requests.request', + "requests.request", side_effect=[ - RequestsReponseMock(__res('4'), 200, {}), - RequestsReponseMock(__res('3'), 200, {}), - RequestsReponseMock(__res('2'), 200, {}), - RequestsReponseMock(__res('1'), 200, {}), - RequestsReponseMock(__res(''), 200, {}), + RequestsReponseMock(__res("4"), 200, {}), + RequestsReponseMock(__res("3"), 200, {}), + RequestsReponseMock(__res("2"), 200, {}), + RequestsReponseMock(__res("1"), 200, {}), + RequestsReponseMock(__res(""), 200, {}), ], ) # Call eventhistory method which should paginate 5 times. _ = disruptive.EventHistory.list_events( - device_id='device_id', - project_id='project_id', + device_id="device_id", + project_id="project_id", ) # Verify it ran exactly 5 times. @@ -231,11 +230,11 @@ def __res(page_token: str): # The last request should have been made with page-token == '1'. url = disruptive.base_url - url += '/projects/project_id/devices/device_id/events' + url += "/projects/project_id/devices/device_id/events" request_mock.assert_requested( - method='GET', + method="GET", url=url, - params={'pageToken': '1'}, + params={"pageToken": "1"}, ) def test_timeout_override(self, request_mock): @@ -244,14 +243,14 @@ def test_timeout_override(self, request_mock): # Call Device.get_device(), overriden all defaults with kwargs. _ = disruptive.Device.get_device( - device_id='device_id', + device_id="device_id", request_timeout=99, ) # Verify request were configured with new timeout. 
-        url = disruptive.base_url + '/projects/-/devices/device_id'
+        url = disruptive.base_url + "/projects/-/devices/device_id"
         request_mock.assert_requested(
-            method='GET',
+            method="GET",
             url=url,
             timeout=99,
         )
@@ -264,7 +263,7 @@ def test_request_attempts_override(self, request_mock):
         with pytest.raises(disruptive.errors.InternalServerError):
             # Call Device.get_device() with overridden retry count.
             disruptive.Device.get_device(
-                device_id='device_id',
+                device_id="device_id",
                 request_attempts=99,
             )
 
@@ -276,7 +275,7 @@ def test_request_attempts_invalid(self, request_mock):
         with pytest.raises(disruptive.errors.ConfigurationError):
             # Call Device.get_device() with overridden retry count.
             disruptive.Device.get_device(
-                device_id='device_id',
+                device_id="device_id",
                 request_attempts=-1,
             )
 
@@ -285,14 +284,14 @@ def test_request_timeout_invalid(self, request_mock):
         with pytest.raises(disruptive.errors.ConfigurationError):
             # Call Device.get_device() with overridden retry count.
             disruptive.Device.get_device(
-                device_id='device_id',
+                device_id="device_id",
                 request_timeout=-1,
             )
 
     def test_request_caught_requests_connection_error(self, request_mock):
         # Re-mock requests.request with a new side_effect.
         request_mock.request_patcher = request_mock._mocker.patch(
-            'requests.request',
+            "requests.request",
             side_effect=requests.exceptions.ConnectionError,
         )
 
@@ -300,14 +299,14 @@ def test_request_caught_requests_connection_error(self, request_mock):
         with pytest.raises(disruptive.errors.ConnectionError):
             # Call Device.get_device(), overriding all defaults with kwargs.
             _ = disruptive.Device.get_device(
-                device_id='device_id',
+                device_id="device_id",
                 request_timeout=99,
             )
 
     def test_request_caught_generic_requests_error(self, request_mock):
         # Re-mock requests.request with a new side_effect.
         request_mock.request_patcher = request_mock._mocker.patch(
-            'requests.request',
+            "requests.request",
             side_effect=requests.exceptions.RequestException,
         )
 
@@ -315,14 +314,14 @@ def test_request_caught_generic_requests_error(self, request_mock):
         with pytest.raises(requests.exceptions.RequestException):
             # Call Device.get_device(), overriding all defaults with kwargs.
             _ = disruptive.Device.get_device(
-                device_id='device_id',
+                device_id="device_id",
                 request_timeout=99,
             )
 
     def test_request_caught_value_error(self, request_mock):
         # Re-mock requests.request with a new side_effect.
         request_mock.request_patcher = request_mock._mocker.patch(
-            'requests.request',
+            "requests.request",
             side_effect=requests.exceptions.RequestException,
         )
 
@@ -330,6 +329,6 @@ def test_request_caught_value_error(self, request_mock):
         with pytest.raises(requests.exceptions.RequestException):
             # Call Device.get_device(), overriding all defaults with kwargs.
             _ = disruptive.Device.get_device(
-                device_id='device_id',
+                device_id="device_id",
                 request_timeout=99,
             )
diff --git a/tests/test_role.py b/tests/test_role.py
index d6933eb..b79e18f 100644
--- a/tests/test_role.py
+++ b/tests/test_role.py
@@ -2,8 +2,7 @@
 import tests.api_responses as dtapiresponses
 
 
-class TestRole():
-
+class TestRole:
     def test_repr(self, request_mock):
         # Update the response data with role data.
         res = dtapiresponses.project_user_role
@@ -11,7 +10,7 @@ def test_repr(self, request_mock):
 
         # Fetch a role.
         x = disruptive.Role.get_role(
-            role='project_user',
+            role="project_user",
         )
 
         # Evaluate __repr__ function and compare copy.
@@ -24,25 +23,25 @@ def test_unpack(self, request_mock):
         request_mock.json = res
 
         # Call the appropriate endpoint.
- p = disruptive.Role.get_role('project.user') + p = disruptive.Role.get_role("project.user") # Assert attributes unpacked correctly. - assert p.role == res['name'].split('/')[-1] - assert p.display_name == res['displayName'] - assert p.description == res['description'] - assert p.permissions == res['permissions'] + assert p.role == res["name"].split("/")[-1] + assert p.display_name == res["displayName"] + assert p.description == res["description"] + assert p.permissions == res["permissions"] def test_get_role(self, request_mock): # Update the response data with role data. request_mock.json = dtapiresponses.project_developer_role # Call the appropriate endpoint. - r = disruptive.Role.get_role('project.developer') + r = disruptive.Role.get_role("project.developer") # Verify request parameters. request_mock.assert_requested( - method='GET', - url=disruptive.base_url+'/roles/project.developer', + method="GET", + url=disruptive.base_url + "/roles/project.developer", ) # Assert single request sent. @@ -60,8 +59,8 @@ def test_list_roles(self, request_mock): # Verify request parameters. request_mock.assert_requested( - method='GET', - url=disruptive.base_url+'/roles', + method="GET", + url=disruptive.base_url + "/roles", ) # Assert single request sent. diff --git a/tests/test_service_account.py b/tests/test_service_account.py index a88664f..b5a9eea 100644 --- a/tests/test_service_account.py +++ b/tests/test_service_account.py @@ -4,8 +4,7 @@ from disruptive.resources.service_account import Key -class TestServiceAccount(): - +class TestServiceAccount: def test_repr(self, request_mock): # Update the response data with Service Account data. res = dtapiresponses.service_account1 @@ -13,8 +12,8 @@ def test_repr(self, request_mock): # Fetch a Service Account. x = disruptive.ServiceAccount.get_service_account( - service_account_id='service_account_id', - project_id='project_id', + service_account_id="service_account_id", + project_id="project_id", ) # Evaluate __repr__ function and compare copy. @@ -28,17 +27,17 @@ def test_unpack(self, request_mock): # Call the appropriate endpoint. s = disruptive.ServiceAccount.get_service_account( - 'service_account_id', - 'project_id', + "service_account_id", + "project_id", ) # Assert attributes unpacked correctly. - assert s.service_account_id == res['name'].split('/')[-1] - assert s.email == res['email'] - assert s.display_name == res['displayName'] - assert s.basic_auth_enabled == res['enableBasicAuth'] - assert s.create_time == dttrans.to_datetime(res['createTime']) - assert s.update_time == dttrans.to_datetime(res['updateTime']) + assert s.service_account_id == res["name"].split("/")[-1] + assert s.email == res["email"] + assert s.display_name == res["displayName"] + assert s.basic_auth_enabled == res["enableBasicAuth"] + assert s.create_time == dttrans.to_datetime(res["createTime"]) + assert s.update_time == dttrans.to_datetime(res["updateTime"]) def test_get_service_account(self, request_mock): # Update the response data with Service Account data. @@ -46,15 +45,16 @@ def test_get_service_account(self, request_mock): # Call the appropriate endpoint. s = disruptive.ServiceAccount.get_service_account( - 'service_account_id', - 'project_id', + "service_account_id", + "project_id", ) # Verify request parameters. 
request_mock.assert_requested( - method='GET', - url=disruptive.base_url+'/projects/project_id/' - + 'serviceaccounts/service_account_id', + method="GET", + url=disruptive.base_url + + "/projects/project_id/" + + "serviceaccounts/service_account_id", ) # Verify single request sent. @@ -69,13 +69,13 @@ def test_list_service_accounts(self, request_mock): # Call the appropriate endpoint sas = disruptive.ServiceAccount.list_service_accounts( - 'project_id', + "project_id", ) # Verify request parameters. request_mock.assert_requested( - method='GET', - url=disruptive.base_url+'/projects/project_id/serviceaccounts', + method="GET", + url=disruptive.base_url + "/projects/project_id/serviceaccounts", ) # Verify single request sent. @@ -91,16 +91,16 @@ def test_create_service_account(self, request_mock): # Call the appropriate endpoint. s = disruptive.ServiceAccount.create_service_account( - 'project_id', - 'new-sa', + "project_id", + "new-sa", True, ) # Verify request parameters. request_mock.assert_requested( - method='POST', - url=disruptive.base_url+'/projects/project_id/serviceaccounts', - body={'displayName': 'new-sa', 'enableBasicAuth': True}, + method="POST", + url=disruptive.base_url + "/projects/project_id/serviceaccounts", + body={"displayName": "new-sa", "enableBasicAuth": True}, ) # Verify single request sent. @@ -115,9 +115,9 @@ def test_update_service_account(self, request_mock): # Call the appropriate endpoint. s = disruptive.ServiceAccount.update_service_account( - service_account_id='service_account_id', - project_id='project_id', - display_name='service-account-1', + service_account_id="service_account_id", + project_id="project_id", + display_name="service-account-1", basic_auth_enabled=False, ) @@ -126,10 +126,14 @@ def test_update_service_account(self, request_mock): # Verify request parameters. request_mock.assert_requested( - method='PATCH', - url=disruptive.base_url+'/projects/project_id/' - + 'serviceaccounts/service_account_id', - body={'displayName': 'service-account-1', 'enableBasicAuth': False} + method="PATCH", + url=disruptive.base_url + + "/projects/project_id/" + + "serviceaccounts/service_account_id", + body={ + "displayName": "service-account-1", + "enableBasicAuth": False, + }, ) # Assert attributes in output Device object. @@ -141,8 +145,8 @@ def test_delete_service_account(self, request_mock): # Call the appropriate endpoint. disruptive.ServiceAccount.delete_service_account( - service_account_id='service_account_id', - project_id='project_id', + service_account_id="service_account_id", + project_id="project_id", ) # Verify single request sent. @@ -150,9 +154,10 @@ def test_delete_service_account(self, request_mock): # Verify request parameters. request_mock.assert_requested( - method='DELETE', - url=disruptive.base_url+'/projects/project_id/' - + 'serviceaccounts/service_account_id', + method="DELETE", + url=disruptive.base_url + + "/projects/project_id/" + + "serviceaccounts/service_account_id", ) def test_key_attributes(self, request_mock): @@ -162,14 +167,14 @@ def test_key_attributes(self, request_mock): # Call the appropriate endpoint. k = disruptive.ServiceAccount.get_key( - 'service_account_id', - 'key_id', - 'project_id', + "service_account_id", + "key_id", + "project_id", ) # Assert attributes unpacked correctly. 
- assert k.key_id == res['name'].split('/')[-1] - assert k.create_time == dttrans.to_datetime(res['createTime']) + assert k.key_id == res["name"].split("/")[-1] + assert k.create_time == dttrans.to_datetime(res["createTime"]) assert k.secret is None def test_key_secret_set(self, request_mock): @@ -179,12 +184,12 @@ def test_key_secret_set(self, request_mock): # Call the appropriate endpoint. k = disruptive.ServiceAccount.create_key( - 'service_account_id', - 'project_id', + "service_account_id", + "project_id", ) # Assert attributes unpacked correctly. - assert k.secret == res['secret'] + assert k.secret == res["secret"] def test_get_key(self, request_mock): # Update the response data with Service Account data. @@ -192,16 +197,17 @@ def test_get_key(self, request_mock): # Call the appropriate endpoint. key = disruptive.ServiceAccount.get_key( - service_account_id='service_account_id', - key_id='key_id', - project_id='project_id', + service_account_id="service_account_id", + key_id="key_id", + project_id="project_id", ) # Verify request parameters. request_mock.assert_requested( - method='GET', - url=disruptive.base_url+'/projects/project_id/' - + 'serviceaccounts/service_account_id/keys/key_id', + method="GET", + url=disruptive.base_url + + "/projects/project_id/" + + "serviceaccounts/service_account_id/keys/key_id", ) # Verify single request sent. @@ -216,15 +222,16 @@ def test_list_keys(self, request_mock): # Call the appropriate endpoint. keys = disruptive.ServiceAccount.list_keys( - service_account_id='service_account_id', - project_id='project_id', + service_account_id="service_account_id", + project_id="project_id", ) # Verify request parameters. request_mock.assert_requested( - method='GET', - url=disruptive.base_url+'/projects/project_id/' - + 'serviceaccounts/service_account_id/keys', + method="GET", + url=disruptive.base_url + + "/projects/project_id/" + + "serviceaccounts/service_account_id/keys", ) # Verify single request sent. @@ -240,15 +247,16 @@ def test_create_key(self, request_mock): # Call the appropriate endpoint. key = disruptive.ServiceAccount.create_key( - service_account_id='service_account_id', - project_id='project_id', + service_account_id="service_account_id", + project_id="project_id", ) # Verify request parameters. request_mock.assert_requested( - method='POST', - url=disruptive.base_url+'/projects/project_id/' - + 'serviceaccounts/service_account_id/keys', + method="POST", + url=disruptive.base_url + + "/projects/project_id/" + + "serviceaccounts/service_account_id/keys", ) # Verify single request sent. @@ -263,16 +271,17 @@ def test_delete_key(self, request_mock): # Call the appropriate endpoint. disruptive.ServiceAccount.delete_key( - service_account_id='service_account_id', - key_id='key_id', - project_id='project_id', + service_account_id="service_account_id", + key_id="key_id", + project_id="project_id", ) # Verify request parameters. request_mock.assert_requested( - method='DELETE', - url=disruptive.base_url+'/projects/project_id/' - + 'serviceaccounts/service_account_id/keys/key_id', + method="DELETE", + url=disruptive.base_url + + "/projects/project_id/" + + "serviceaccounts/service_account_id/keys/key_id", ) # Verify single request sent. 
diff --git a/tests/test_stream.py b/tests/test_stream.py index 5773fe4..e9b136b 100644 --- a/tests/test_stream.py +++ b/tests/test_stream.py @@ -10,38 +10,35 @@ from disruptive.events import Event -class TestStream(): - +class TestStream: def test_event_stream_arguments(self, request_mock): - request_mock.iter_data = [ - dtapiresponses.stream_temperature_event - ] + request_mock.iter_data = [dtapiresponses.stream_temperature_event] # Call stream with customer kwargs. for _ in disruptive.Stream.event_stream( - project_id='project_id', - device_ids=['id1', 'id2', 'id3'], + project_id="project_id", + device_ids=["id1", "id2", "id3"], label_filters={ - 'l1': 'v1', - 'l2': None, + "l1": "v1", + "l2": None, }, - device_types=['temperature', 'touch'], - event_types=['temperature', 'touch'], + device_types=["temperature", "touch"], + event_types=["temperature", "touch"], request_attempts=9, ): pass url = disruptive.base_url - url += '/projects/project_id/devices:stream' + url += "/projects/project_id/devices:stream" request_mock.assert_requested( - method='GET', + method="GET", url=url, params={ - 'device_ids': ['id1', 'id2', 'id3'], - 'device_types': ['temperature', 'touch'], - 'label_filters': ['l1=v1', 'l2'], - 'event_types': ['temperature', 'touch'], - 'ping_interval': '10s', + "device_ids": ["id1", "id2", "id3"], + "device_types": ["temperature", "touch"], + "label_filters": ["l1=v1", "l2"], + "event_types": ["temperature", "touch"], + "ping_interval": "10s", }, stream=True, timeout=12, @@ -58,12 +55,12 @@ def test_ping(self, request_mock): ] # Mock logging function, which should trigger once for each ping. - with patch('disruptive.logging.debug') as log_mock: - for _ in disruptive.Stream.event_stream('project_id'): + with patch("disruptive.logging.debug") as log_mock: + for _ in disruptive.Stream.event_stream("project_id"): pass # Assert logging called with expected message. - log_mock.assert_called_with('Ping received.') + log_mock.assert_called_with("Ping received.") # debug() should have been called once per ping. assert log_mock.call_count == 5 @@ -74,22 +71,29 @@ def test_responses(self, request_mock): temp = dtapiresponses.stream_temperature_event nstat = dtapiresponses.stream_networkstatus_event request_mock.iter_data = [ - ping, temp, nstat, ping, temp, - nstat, ping, temp, nstat + ping, + temp, + nstat, + ping, + temp, + nstat, + ping, + temp, + nstat, ] # Convert bytes strings to expected responses. expected = [ - Event(json.loads(temp.decode('ascii'))['result']['event']), - Event(json.loads(nstat.decode('ascii'))['result']['event']), - Event(json.loads(temp.decode('ascii'))['result']['event']), - Event(json.loads(nstat.decode('ascii'))['result']['event']), - Event(json.loads(temp.decode('ascii'))['result']['event']), - Event(json.loads(nstat.decode('ascii'))['result']['event']), + Event(json.loads(temp.decode("ascii"))["result"]["event"]), + Event(json.loads(nstat.decode("ascii"))["result"]["event"]), + Event(json.loads(temp.decode("ascii"))["result"]["event"]), + Event(json.loads(nstat.decode("ascii"))["result"]["event"]), + Event(json.loads(temp.decode("ascii"))["result"]["event"]), + Event(json.loads(nstat.decode("ascii"))["result"]["event"]), ] # Start the stream. - for i, e in enumerate(disruptive.Stream.event_stream('project_id')): + for i, e in enumerate(disruptive.Stream.event_stream("project_id")): # Compare stream event to expected events. 
             assert e._raw == expected[i]._raw
@@ -104,8 +108,8 @@ def side_effect_override(**kwargs):
         with pytest.raises(dterrors.ReadTimeout):
             # Start a stream, which should raise an error causing retries.
             for _ in disruptive.Stream.event_stream(
-                    project_id='project_id',
-                    request_attempts=8):
+                project_id="project_id", request_attempts=8
+            ):
                 pass
 
         # Verify request is attempted the set number of times (+1).
@@ -122,8 +126,8 @@ def side_effect_override(**kwargs):
         with pytest.raises(dterrors.ConnectionError):
             # Start a stream, which should raise an error causing retries.
             for _ in disruptive.Stream.event_stream(
-                    project_id='project_id',
-                    request_attempts=7):
+                project_id="project_id", request_attempts=7
+            ):
                 pass
 
         # Verify request is attempted the set number of times (+1).
diff --git a/tests/test_transforms.py b/tests/test_transforms.py
index 6b5e58e..0f1edb1 100644
--- a/tests/test_transforms.py
+++ b/tests/test_transforms.py
@@ -7,14 +7,13 @@
 import disruptive.errors as dterrors
 
 
-class TestTransforms():
-
+class TestTransforms:
     def test_base64_encode(self):
-        outp = 'ZXhhbXBsZV9zdHJpbmc='
-        assert dttrans.base64_encode('example_string') == outp
+        outp = "ZXhhbXBsZV9zdHJpbmc="
+        assert dttrans.base64_encode("example_string") == outp
 
     def test_to_iso8601_invalid_type(self):
-        inp = {'timestamp': '1970-01-01T00:00:00Z'}
+        inp = {"timestamp": "1970-01-01T00:00:00Z"}
         with pytest.raises(TypeError):
             dttrans.to_iso8601(inp)
 
@@ -24,38 +23,38 @@ def test_to_iso8601_none(self):
         assert dttrans.to_iso8601(inp) == outp
 
     def test_to_iso8601_string_utc(self):
-        inp = '1970-01-01T00:00:00Z'
-        outp = '1970-01-01T00:00:00Z'
+        inp = "1970-01-01T00:00:00Z"
+        outp = "1970-01-01T00:00:00Z"
         assert dttrans.to_iso8601(inp) == outp
 
     def test_to_iso8601_string_tz_offset(self):
-        inp = '1970-01-01T00:00:00+02:00'
-        outp = '1970-01-01T00:00:00+02:00'
+        inp = "1970-01-01T00:00:00+02:00"
+        outp = "1970-01-01T00:00:00+02:00"
         assert dttrans.to_iso8601(inp) == outp
 
     def test_to_iso8601_string_invalid_tz(self):
-        inp = '1970-01-01T00:00:00+02:00Z'
+        inp = "1970-01-01T00:00:00+02:00Z"
         with pytest.raises(dterrors.FormatError):
             dttrans.to_iso8601(inp)
 
     def test_to_iso8601_datetime_with_tz_utc(self):
         inp = datetime(1970, 1, 1, tzinfo=timezone(timedelta(hours=0)))
         # A timezone of 00:00 should return 'Z' instead.
- outp = '1970-01-01T00:00:00Z' + outp = "1970-01-01T00:00:00Z" assert dttrans.to_iso8601(inp) == outp def test_to_iso8601_datetime_with_tz_offset(self): inp = datetime(1970, 1, 1, tzinfo=timezone(timedelta(hours=2))) - outp = '1970-01-01T00:00:00+02:00' + outp = "1970-01-01T00:00:00+02:00" assert dttrans.to_iso8601(inp) == outp def test_to_iso8601_datetime_without_tz(self): inp = datetime(1970, 1, 1) - outp = '1970-01-01T00:00:00Z' + outp = "1970-01-01T00:00:00Z" assert dttrans.to_iso8601(inp) == outp def test_to_datetime_invalid_type(self): - inp = {'timestamp': datetime(1970, 1, 1)} + inp = {"timestamp": datetime(1970, 1, 1)} with pytest.raises(TypeError): dttrans.to_datetime(inp) @@ -65,17 +64,17 @@ def test_to_datetime_none(self): assert dttrans.to_datetime(inp) == outp def test_to_datetime_missing_tz(self): - inp = '1970-01-01T00:00:00' + inp = "1970-01-01T00:00:00" with pytest.raises(dterrors.FormatError): dttrans.to_datetime(inp) def test_to_datetime_tz_utc(self): - inp = '1970-01-01T00:00:00Z' + inp = "1970-01-01T00:00:00Z" outp = datetime(1970, 1, 1, tzinfo=timezone(timedelta(hours=0))) assert dttrans.to_datetime(inp) == outp def test_to_datetime_tz_offset(self): - inp = '1970-01-01T00:00:00+02:00' + inp = "1970-01-01T00:00:00+02:00" outp = datetime(1970, 1, 1, tzinfo=timezone(timedelta(hours=2))) assert dttrans.to_datetime(inp) == outp @@ -85,18 +84,18 @@ def test_to_datetime_already_datetime(self): assert dttrans.to_datetime(inp) == outp def test_validate_iso8601_format_valid(self): - inp1 = '1970-01-01T00:00:00Z' + inp1 = "1970-01-01T00:00:00Z" assert dttrans.validate_iso8601_format(inp1) is True - inp2 = '1970-01-01T00:00:00+00:00' + inp2 = "1970-01-01T00:00:00+00:00" assert dttrans.validate_iso8601_format(inp2) is True def test_validate_iso8601_format_missing_tz(self): - inp = '1970-01-01T00:00:00' + inp = "1970-01-01T00:00:00" assert dttrans.validate_iso8601_format(inp) is False def test_validate_iso8601_format_date_only(self): - inp = '1970-01-01' + inp = "1970-01-01" assert dttrans.validate_iso8601_format(inp) is False def test_camel_to_snake_case(self): @@ -108,24 +107,24 @@ class TestCase: tests = [ TestCase( - name='single case', - give_str='camelCase', - want_str='camel_case', + name="single case", + give_str="camelCase", + want_str="camel_case", ), TestCase( - name='multiple cases', - give_str='camelCaseDoesntBelongInPython', - want_str='camel_case_doesnt_belong_in_python', + name="multiple cases", + give_str="camelCaseDoesntBelongInPython", + want_str="camel_case_doesnt_belong_in_python", ), TestCase( - name='keep dots', - give_str='name.camelCase', - want_str='name.camel_case', + name="keep dots", + give_str="name.camelCase", + want_str="name.camel_case", ), TestCase( - name='keep spaces', - give_str='name and camelCase', - want_str='name and camel_case', + name="keep spaces", + give_str="name and camelCase", + want_str="name and camel_case", ), ]