From 5d05ea9d9a1216bd8a6f41fb8e23a20b1f841f70 Mon Sep 17 00:00:00 2001 From: aaschaer Date: Fri, 17 May 2024 13:29:50 -0500 Subject: [PATCH] TokenStorage (v2) (#980) * TokenStorage and implementations * ValidatingTokenStorage * handle missing namespace on read * renaming from review --- ...425_090230_aaschaer_storage_adapter_v2.rst | 14 + .../auth_requirements_error/_validators.py | 6 + .../experimental/globus_app/__init__.py | 10 +- .../_identifiable_oauth_token_response.py | 45 ---- ...apater.py => _validating_token_storage.py} | 174 ++++++++----- .../experimental/tokenstorage/__init__.py | 14 + .../experimental/tokenstorage/base.py | 136 ++++++++++ .../experimental/tokenstorage/json.py | 159 +++++++++++ .../experimental/tokenstorage/memory.py | 39 +++ .../experimental/tokenstorage/sqlite.py | 246 ++++++++++++++++++ .../experimental/tokenstorage/token_data.py | 62 +++++ tests/functional/tokenstorage_v2/conftest.py | 68 +++++ .../test_json_token_storage.py | 109 ++++++++ .../test_memory_token_storage.py | 52 ++++ .../test_sqlite_token_storage.py | 194 ++++++++++++++ ...er.py => test_validating_token_storage.py} | 62 ++--- 16 files changed, 1249 insertions(+), 141 deletions(-) create mode 100644 changelog.d/20240425_090230_aaschaer_storage_adapter_v2.rst delete mode 100644 src/globus_sdk/experimental/globus_app/_identifiable_oauth_token_response.py rename src/globus_sdk/experimental/globus_app/{_validating_storage_adapater.py => _validating_token_storage.py} (50%) create mode 100644 src/globus_sdk/experimental/tokenstorage/__init__.py create mode 100644 src/globus_sdk/experimental/tokenstorage/base.py create mode 100644 src/globus_sdk/experimental/tokenstorage/json.py create mode 100644 src/globus_sdk/experimental/tokenstorage/memory.py create mode 100644 src/globus_sdk/experimental/tokenstorage/sqlite.py create mode 100644 src/globus_sdk/experimental/tokenstorage/token_data.py create mode 100644 tests/functional/tokenstorage_v2/conftest.py create mode 100644 tests/functional/tokenstorage_v2/test_json_token_storage.py create mode 100644 tests/functional/tokenstorage_v2/test_memory_token_storage.py create mode 100644 tests/functional/tokenstorage_v2/test_sqlite_token_storage.py rename tests/unit/experimental/globus_app/{test_validating_storage_adapter.py => test_validating_token_storage.py} (76%) diff --git a/changelog.d/20240425_090230_aaschaer_storage_adapter_v2.rst b/changelog.d/20240425_090230_aaschaer_storage_adapter_v2.rst new file mode 100644 index 000000000..cd50a1317 --- /dev/null +++ b/changelog.d/20240425_090230_aaschaer_storage_adapter_v2.rst @@ -0,0 +1,14 @@ +Added +~~~~~ + +- Added ``TokenStorage`` to experimental along with ``FileTokenStorage``, + ``JSONTokenStorage``, ``MemoryTokenStorage`` and ``SQLiteTokenStorage`` which + implement it. ``TokenStorage`` expands the functionality of ``StorageAdapter`` + but is not fully backwards compatible. 
(:pr:`NUMBER`)
+
+Changed
+~~~~~~~
+
+- The experimental class ``ValidatingStorageAdapter`` has been renamed to
+  ``ValidatingTokenStorage`` and now implements ``TokenStorage`` instead of
+  ``StorageAdapter`` (:pr:`NUMBER`)
diff --git a/src/globus_sdk/experimental/auth_requirements_error/_validators.py b/src/globus_sdk/experimental/auth_requirements_error/_validators.py
index e14289f8a..29e6d2533 100644
--- a/src/globus_sdk/experimental/auth_requirements_error/_validators.py
+++ b/src/globus_sdk/experimental/auth_requirements_error/_validators.py
@@ -19,6 +19,12 @@ def str_(name: str, value: t.Any) -> str:
     raise ValidationError(f"'{name}' must be a string")
 
 
+def int_(name: str, value: t.Any) -> int:
+    if isinstance(value, int):
+        return value
+    raise ValidationError(f"'{name}' must be an int")
+
+
 def opt_str(name: str, value: t.Any) -> str | None:
     if _guards.is_optional(value, str):
         return value
diff --git a/src/globus_sdk/experimental/globus_app/__init__.py b/src/globus_sdk/experimental/globus_app/__init__.py
index 2d7244f18..e8444415d 100644
--- a/src/globus_sdk/experimental/globus_app/__init__.py
+++ b/src/globus_sdk/experimental/globus_app/__init__.py
@@ -1,11 +1,5 @@
-from ._identifiable_oauth_token_response import (
-    IdentifiedOAuthTokenResponse,
-    expand_id_token,
-)
-from ._validating_storage_adapater import ValidatingStorageAdapter
+from ._validating_token_storage import ValidatingTokenStorage
 
 __all__ = [
-    "IdentifiedOAuthTokenResponse",
-    "expand_id_token",
-    "ValidatingStorageAdapter",
+    "ValidatingTokenStorage",
 ]
diff --git a/src/globus_sdk/experimental/globus_app/_identifiable_oauth_token_response.py b/src/globus_sdk/experimental/globus_app/_identifiable_oauth_token_response.py
deleted file mode 100644
index 7ad9db704..000000000
--- a/src/globus_sdk/experimental/globus_app/_identifiable_oauth_token_response.py
+++ /dev/null
@@ -1,45 +0,0 @@
-import typing as t
-
-from globus_sdk import OAuthTokenResponse
-
-from ..._types import UUIDLike
-from .errors import MissingIdentityError
-
-
-class IdentifiedOAuthTokenResponse(OAuthTokenResponse):
-    """
-    A subclass of OAuthTokenResponse with attached identity information.
-    """
-
-    def __init__(self, identity_id: UUIDLike, *args: t.Any, **kwargs: t.Any):
-        super().__init__(*args, **kwargs)
-        self.identity_id = identity_id
-        self.by_resource_server["auth.globus.org"]["identity_id"] = identity_id
-
-
-def expand_id_token(response: OAuthTokenResponse) -> IdentifiedOAuthTokenResponse:
-    """
-    Given a token response, return an IdentifiedOAuthTokenResponse object which
-    extracts the identity information from the token response into the auth
-    token.
-
-    Any token response passed to this function must have come from an auth flow which
-    included the "openid" scope. This is because the id_token is only included in
-    the token response when the "openid" scope is requested.
-
-    :param response: The token response to extract identity information from
-    :raises: MissingIdentityError if the token response does not contain an id_token
-    """
-    if (
-        "auth.globus.org" not in response.by_resource_server
-        or "id_token" not in response.data
-    ):
-        raise MissingIdentityError(
-            "Token grant response doesn't contain an id_token. This normally occurs if "
-            "the auth flow didn't include 'openid' alongside other scopes."
-        )
-
-    decoded_token = response.decode_id_token()
-    identity_id = decoded_token["sub"]
-
-    return IdentifiedOAuthTokenResponse(identity_id, response)
diff --git a/src/globus_sdk/experimental/globus_app/_validating_storage_adapater.py b/src/globus_sdk/experimental/globus_app/_validating_token_storage.py
similarity index 50%
rename from src/globus_sdk/experimental/globus_app/_validating_storage_adapater.py
rename to src/globus_sdk/experimental/globus_app/_validating_token_storage.py
index 7f63f370d..651f792cd 100644
--- a/src/globus_sdk/experimental/globus_app/_validating_storage_adapater.py
+++ b/src/globus_sdk/experimental/globus_app/_validating_token_storage.py
@@ -1,95 +1,126 @@
 from __future__ import annotations
 
 import time
-import typing as t
 
-from globus_sdk import AuthClient, OAuthTokenResponse, Scope
+from globus_sdk import AuthClient, Scope
 from globus_sdk.experimental.consents import ConsentForest
-from globus_sdk.tokenstorage import StorageAdapter
+from globus_sdk.experimental.tokenstorage import TokenData, TokenStorage
 
 from ..._types import UUIDLike
-from ._identifiable_oauth_token_response import (
-    IdentifiedOAuthTokenResponse,
-    expand_id_token,
-)
 from .errors import (
     ExpiredTokenError,
     IdentityMismatchError,
+    MissingIdentityError,
     UnmetScopeRequirementsError,
 )
 
 
-class ValidatingStorageAdapter(StorageAdapter):
+def _get_identity_id_from_token_data_by_resource_server(
+    token_data_by_resource_server: dict[str, TokenData]
+) -> str | None:
+    """
+    Get the identity_id attribute from all TokenData objects by resource server.
+    Sanity check that they are all the same, and then return that identity_id or None.
     """
-    A special version of a StorageAdapter which wraps another storage adapter and
+    token_data_identity_ids: set[str] = set()
+
+    for token_data in token_data_by_resource_server.values():
+        if token_data.identity_id:
+            token_data_identity_ids.add(token_data.identity_id)
+
+    if len(token_data_identity_ids) == 0:
+        return None
+    elif len(token_data_identity_ids) == 1:
+        return token_data_identity_ids.pop()
+    else:
+        raise ValueError(
+            "token_data_by_resource_server contained TokenData objects with "
+            f"different identity_id values: {token_data_identity_ids}"
+        )
+
+
+class ValidatingTokenStorage(TokenStorage):
+    """
+    A special version of TokenStorage which wraps another TokenStorage and
     validates that tokens meet certain requirements when storing/retrieving them.
 
-    The adapter is not concerned with the actual storage location of tokens but rather
-    validating that they meet certain requirements:
+    ValidatingTokenStorage is not concerned with the actual storage location of tokens
+    but rather validating that they meet certain requirements:
 
     1) Identity Requirements
         a) Identity info is present in the token data (this requires that the
             token data was retrieved with the "openid" scope in addition to any
            other scope requirements).
        b) The identity info in the token data matches the identity info stored
-            previously in the adapter.
+            previously.
 
     2) Scope Requirements
-        b) Each newly polled resource server's token meets the root scope
+        a) Each newly polled resource server's token meets the root scope
            requirements for that resource server.
-        c) Polled consents meets all dependent scope requirements.
+        b) Polled consents meet all dependent scope requirements.
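+
+    For illustration, a minimal composition sketch (the resource server name
+    and scope string are placeholders mirroring the test suite, and
+    ``token_response`` stands in for a response from an auth flow)::
+
+        requirements = {"rs1": [Scope.deserialize("scope1")]}
+        storage = ValidatingTokenStorage(MemoryTokenStorage(), requirements)
+        storage.store_token_response(token_response)  # validates, then stores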
""" def __init__( self, - storage_adapter: StorageAdapter, + token_storage: TokenStorage, scope_requirements: dict[str, list[Scope]], *, consent_client: AuthClient | None = None, ): """ - :param storage_adapter: The storage adapter being wrapped. + :param token_storage: The token storage being wrapped. :param scope_requirements: A collection of resource-server keyed scope requirements to validate on token storage/retrieval. :param consent_client: An AuthClient to be used for consent polling. If omitted, dependent scope requirements are ignored during validation. """ - self._storage_adapter = storage_adapter + self._token_storage = token_storage self.scope_requirements = scope_requirements self._consent_client = consent_client self.identity_id = self._lookup_stored_identity_id() self._cached_consent_forest = self._poll_and_cache_consents() + super().__init__(namespace=token_storage.namespace) + def _lookup_stored_identity_id(self) -> UUIDLike | None: """ Attempts to extract an identity id from stored token data using the internal - storage adapter. + token storage. - :returns: An identity id if one can be extracted from the internal storage - adapter, otherwise None + :returns: An identity id if one can be extracted from the internal token + storage, otherwise None """ - auth_token_data = self._storage_adapter.get_token_data("auth.globus.org") - if auth_token_data is None or "identity_id" not in auth_token_data: - # Either: - # (1) No auth token data is present in the storage adapter or - # (2) No identity token is present in the auth token data. - return None - return t.cast(str, auth_token_data["identity_id"]) + token_data_by_resource_server = ( + self._token_storage.get_token_data_by_resource_server() + ) + return _get_identity_id_from_token_data_by_resource_server( + token_data_by_resource_server + ) + + def store_token_data_by_resource_server( + self, token_data_by_resource_server: dict[str, TokenData] + ) -> None: + + self._validate_token_data_by_resource_server(token_data_by_resource_server) + self._token_storage.store_token_data_by_resource_server( + token_data_by_resource_server + ) - def store(self, token_response: OAuthTokenResponse) -> None: + def get_token_data_by_resource_server(self) -> dict[str, TokenData]: """ - :param token_response: A OAuthTokenResponse resulting from a Globus Auth flow. - :raises: :exc:`TokenValidationError` if the token has expired does not meet - the attached scope requirements, or is associated with a different identity - than was previously used with this adapter. + :returns: A dict of TokenData objects indexed by their resource server + :raises: :exc:`TokenValidationError` if any of the token data have expired or + do not meet the attached scope requirements. """ + token_data_by_resource_server = ( + self._token_storage.get_token_data_by_resource_server() + ) - # Extract id_token info, raising an error if it's not present. - identified_token_response = expand_id_token(token_response) + for resource_server, token_data in token_data_by_resource_server.items(): + self._validate_token_meets_scope_requirements(resource_server, token_data) - self._validate_response(identified_token_response) - self._storage_adapter.store(identified_token_response) + return token_data_by_resource_server - def get_token_data(self, resource_server: str) -> dict[str, t.Any] | None: + def get_token_data(self, resource_server: str) -> TokenData | None: """ :param resource_server: A resource server with cached token data. 
:returns: The token data for the given resource server, or None if no token data @@ -97,7 +128,7 @@ def get_token_data(self, resource_server: str) -> dict[str, t.Any] | None: :raises: :exc:`TokenValidationError` if the token has expired or does not meet the attached scope requirements. """ - token_data = self._storage_adapter.get_token_data(resource_server) + token_data = self._token_storage.get_token_data(resource_server) if token_data is None: return None @@ -105,18 +136,30 @@ def get_token_data(self, resource_server: str) -> dict[str, t.Any] | None: return token_data - def _validate_response(self, token_response: IdentifiedOAuthTokenResponse) -> None: - self._validate_response_meets_identity_requirements(token_response) - self._validate_response_meets_scope_requirements(token_response) + def remove_token_data(self, resource_server: str) -> bool: + """ + :param resource_server: The resource server string to remove token data for + """ + return self._token_storage.remove_token_data(resource_server) - def _validate_token(self, resource_server: str, token: dict[str, t.Any]) -> None: - if token["expires_at_seconds"] < time.time(): - raise ExpiredTokenError(token["expires_at_seconds"]) + def _validate_token_data_by_resource_server( + self, token_data_by_resource_server: dict[str, TokenData] + ) -> None: + self._validate_token_data_by_resource_server_meets_identity_requirements( + token_data_by_resource_server + ) + self._validate_token_data_by_resource_server_meets_scope_requirements( + token_data_by_resource_server + ) - self._validate_token_meets_scope_requirements(resource_server, token) + def _validate_token_data(self, resource_server: str, token_data: TokenData) -> None: + if token_data.expires_at_seconds < time.time(): + raise ExpiredTokenError(token_data.expires_at_seconds) - def _validate_response_meets_identity_requirements( - self, token_response: IdentifiedOAuthTokenResponse + self._validate_token_meets_scope_requirements(resource_server, token_data) + + def _validate_token_data_by_resource_server_meets_identity_requirements( + self, token_data_by_resource_server: dict[str, TokenData] ) -> None: """ Validate that the identity info in the token data matches the stored identity @@ -125,34 +168,47 @@ def _validate_response_meets_identity_requirements( Side Effect =========== If no identity info was previously stored, the attached identity is considered - authoritative and stored on the adapter instance. + authoritative and stored on the token storage instance. :raises: :exc:`IdentityMismatchError` if the identity info in the token data does not match the stored identity info. + :raises :exc:`MissingIdentityError` if the token data did not have identity + information (generally due to missing the openid scope) """ + token_data_identity_id = _get_identity_id_from_token_data_by_resource_server( + token_data_by_resource_server + ) + + if token_data_identity_id is None: + raise MissingIdentityError( + "Token grant response doesn't contain an id_token. This normally " + "occurs if the auth flow didn't include 'openid' alongside other " + "scopes." 
+ ) + if self.identity_id is None: - self.identity_id = token_response.identity_id + self.identity_id = token_data_identity_id return - if token_response.identity_id != self.identity_id: + if token_data_identity_id != self.identity_id: raise IdentityMismatchError( "Detected a change in identity associated with the token data.", stored_id=self.identity_id, - new_id=token_response.identity_id, + new_id=token_data_identity_id, ) - def _validate_response_meets_scope_requirements( - self, token_response: IdentifiedOAuthTokenResponse + def _validate_token_data_by_resource_server_meets_scope_requirements( + self, token_data_by_resource_server: dict[str, TokenData] ) -> None: - for resource_server, token_data in token_response.by_resource_server.items(): - self._validate_token(resource_server, token_data) + for resource_server, token_data in token_data_by_resource_server.items(): + self._validate_token_data(resource_server, token_data) def _validate_token_meets_scope_requirements( - self, resource_server: str, token: dict[str, t.Any] + self, resource_server: str, token_data: TokenData ) -> None: """ - Given a particular resource server/token, evaluate whether the token + user's - consent forest meet the attached scope requirements. + Given a particular resource server/token data, evaluate whether the token + + user's consent forest meet the attached scope requirements. Note: If consent_client was omitted, only root scope requirements are validated. @@ -166,7 +222,7 @@ def _validate_token_meets_scope_requirements( return # 1. Does the token meet root scope requirements? - root_scopes = token["scope"].split(" ") + root_scopes = token_data.scope.split(" ") if not all(scope.scope_string in root_scopes for scope in required_scopes): raise UnmetScopeRequirementsError( "Unmet root scope requirements", diff --git a/src/globus_sdk/experimental/tokenstorage/__init__.py b/src/globus_sdk/experimental/tokenstorage/__init__.py new file mode 100644 index 000000000..9f6b5b68b --- /dev/null +++ b/src/globus_sdk/experimental/tokenstorage/__init__.py @@ -0,0 +1,14 @@ +from globus_sdk.experimental.tokenstorage.base import FileTokenStorage, TokenStorage +from globus_sdk.experimental.tokenstorage.json import JSONTokenStorage +from globus_sdk.experimental.tokenstorage.memory import MemoryTokenStorage +from globus_sdk.experimental.tokenstorage.sqlite import SQLiteTokenStorage +from globus_sdk.experimental.tokenstorage.token_data import TokenData + +__all__ = ( + "JSONTokenStorage", + "SQLiteTokenStorage", + "TokenStorage", + "FileTokenStorage", + "MemoryTokenStorage", + "TokenData", +) diff --git a/src/globus_sdk/experimental/tokenstorage/base.py b/src/globus_sdk/experimental/tokenstorage/base.py new file mode 100644 index 000000000..28cb88397 --- /dev/null +++ b/src/globus_sdk/experimental/tokenstorage/base.py @@ -0,0 +1,136 @@ +from __future__ import annotations + +import abc +import contextlib +import os +import typing as t + +from globus_sdk.services.auth import OAuthTokenResponse + +from .token_data import TokenData + + +class TokenStorage(metaclass=abc.ABCMeta): + """ + Abstract base class for interacting with an underlying storage system to manage + storage of token data. + + The ``namespace`` is a user-supplied way of partitioning data, and any token + response data passed to the storage adapter are stored indexed by + *resource_server*. If you have a more complex use-case in which this scheme will be + insufficient, you should encode that in your choice of ``namespace`` values. 
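+
+    As a sketch of namespace partitioning (the file name and namespace values
+    are hypothetical), two storages may share one backing store without seeing
+    each other's tokens::
+
+        storage_a = SQLiteTokenStorage("tokens.db", namespace="user/alice")
+        storage_b = SQLiteTokenStorage("tokens.db", namespace="user/bob")
+        # data stored through storage_a is not visible to storage_b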
+ """ + + def __init__(self, namespace: str = "DEFAULT") -> None: + """ + :param namespace: A user-supplied namespace for partitioning token data. + """ + self.namespace = namespace + + @abc.abstractmethod + def store_token_data_by_resource_server( + self, token_data_by_resource_server: dict[str, TokenData] + ) -> None: + """ + Store token data in underlying storage partitioned by the resource server + and the current namespace. + + :param token_data_by_resource_server: a ``dict`` of ``TokenData`` objects + indexed by their ``resource_server``. + """ + + @abc.abstractmethod + def get_token_data_by_resource_server(self) -> dict[str, TokenData]: + """ + Lookup all token data under the current namespace in the underlying storage. + + Returns a dict of ``TokenData`` objects indexed by their resource server. + """ + + def get_token_data(self, resource_server: str) -> TokenData | None: + """ + Lookup token data for a resource server in the underlying storage + under the current namespace. + + Either returns a ``TokenData`` object containing tokens and metadata for + the given resource server or ``None`` indicating that there was no data for + that resource server. + + :param resource_server: The resource_server string to get token data for + """ + return self.get_token_data_by_resource_server().get(resource_server) + + @abc.abstractmethod + def remove_token_data(self, resource_server: str) -> bool: + """ + Remove all token data for a resource server from the underlying storage under + the current namespace. + + Returns True if token data was deleted, False if none was found to delete. + + :param resource_server: The resource server string to remove token data for + """ + + def store_token_response(self, token_response: OAuthTokenResponse) -> None: + """ + Wrapper around ``store_token_data_by_resource_server`` that accepts an + ``OAuthTokenResponse``. + + :param token_response: An ``OAuthTokenResponse`` from an authentication flow + """ + token_data_by_resource_server = {} + + # get identity_id from id_token if available + if token_response.get("id_token"): + decoded_id_token = token_response.decode_id_token() + identity_id = decoded_id_token["sub"] + else: + identity_id = None + + for resource_server, token_dict in token_response.by_resource_server.items(): + token_data_by_resource_server[resource_server] = TokenData( + resource_server=token_dict["resource_server"], + identity_id=identity_id, + scope=token_dict["scope"], + access_token=token_dict["access_token"], + refresh_token=token_dict.get("refresh_token"), + expires_at_seconds=token_dict["expires_at_seconds"], + token_type=token_dict.get("token_type"), + ) + self.store_token_data_by_resource_server(token_data_by_resource_server) + + +class FileTokenStorage(TokenStorage, metaclass=abc.ABCMeta): + """ + File adapters are for single-user cases, where we can assume that there's a + simple file-per-user and users are only ever attempting to read their own + files. + """ + + filename: str + + def file_exists(self) -> bool: + """ + Check if the file used by this file storage adapter exists. + """ + return os.path.exists(self.filename) + + @contextlib.contextmanager + def user_only_umask(self) -> t.Iterator[None]: + """ + A context manager to deny rwx to Group and World, x to User + + This does not create a file, but ensures that if a file is created while in the + context manager, its permissions will be correct on unix systems. + + .. note:: + + On Windows, this has no effect. 
To control the permissions on files used for
+            token storage, use ``%LOCALAPPDATA%`` or ``%APPDATA%``.
+            These directories should only be accessible to the current user.
+        """
+        old_umask = os.umask(0o177)
+        try:
+            yield
+        finally:
+            os.umask(old_umask)
diff --git a/src/globus_sdk/experimental/tokenstorage/json.py b/src/globus_sdk/experimental/tokenstorage/json.py
new file mode 100644
index 000000000..ed76f0d1e
--- /dev/null
+++ b/src/globus_sdk/experimental/tokenstorage/json.py
@@ -0,0 +1,159 @@
+from __future__ import annotations
+
+import json
+import pathlib
+import typing as t
+
+from globus_sdk.experimental.tokenstorage.base import FileTokenStorage
+from globus_sdk.version import __version__
+
+from .token_data import TokenData
+
+
+class JSONTokenStorage(FileTokenStorage):
+    """
+    A storage adapter for storing token data in JSON files.
+    """
+
+    # the version for the current data format used by the file adapter.
+    # 1.0 was used by ``SimpleJSONFileAdapter``. If ``JSONTokenStorage`` is
+    # pointed at storage used by a ``SimpleJSONFileAdapter`` it will be
+    # converted to 2.0 and no longer usable by ``SimpleJSONFileAdapter``.
+    format_version = "2.0"
+
+    # the supported versions (data not in these versions causes an error)
+    supported_versions = ("1.0", "2.0")
+
+    def __init__(self, filename: pathlib.Path | str, *, namespace: str = "DEFAULT"):
+        """
+        :param filename: the name of the file to write to and read from
+        :param namespace: A user-supplied namespace for partitioning token data
+        """
+        self.filename = str(filename)
+        super().__init__(namespace=namespace)
+
+    def _raw_load(self) -> dict[str, t.Any]:
+        """
+        Load the file contents as JSON and return the resulting dict
+        object. If a dict is not found, raises an error.
+        """
+        with open(self.filename, encoding="utf-8") as f:
+            val = json.load(f)
+        if not isinstance(val, dict):
+            raise ValueError("reading from json file got non-dict data")
+        return val
+
+    def _handle_formats(self, read_data: dict[str, t.Any]) -> dict[str, t.Any]:
+        """Handle older data formats supported by this class
+
+        If the data is not in a known/recognized format, this will error;
+        otherwise, reshape the data to the current supported format and return it.
+        """
+        format_version = read_data.get("format_version")
+        if format_version not in self.supported_versions:
+            raise ValueError(
+                f"cannot use JSONTokenStorage({self.filename}); "
+                "existing data file is in an unknown format "
+                f"(format_version={format_version})"
+            )
+
+        # 1.0 data was stored under a "by_rs" key without namespaces, to upgrade we
+        # move everything under the "DEFAULT" key and remove the "by_rs" key.
+        if format_version == "1.0":
+            read_data = {
+                "data": {
+                    "DEFAULT": read_data["by_rs"],
+                },
+                "format_version": self.format_version,
+                "globus-sdk.version": __version__,
+            }
+
+        return read_data
+
+    def _load(self) -> dict[str, t.Any]:
+        """
+        Load data from the file and ensure that the data is in a modern format which can
+        be handled by the rest of the adapter.
+
+        If the file is missing, this will return a "skeleton" for new data.
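+
+        As a sketch, that skeleton has the shape::
+
+            {"data": {}, "format_version": "2.0", "globus-sdk.version": ...}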
+ """ + try: + data = self._raw_load() + except FileNotFoundError: + return { + "data": {}, + "format_version": self.format_version, + "globus-sdk.version": __version__, + } + return self._handle_formats(data) + + def store_token_data_by_resource_server( + self, token_data_by_resource_server: dict[str, TokenData] + ) -> None: + """ + Store token data as JSON data in ``self.filename`` under the current namespace + + Additionally will write the version of ``globus_sdk``which was in use. + + Under the assumption that this may be running on a system with multiple + local users, this sets the umask such that only the owner of the + resulting file can read or write it. + + :param token_data_by_resource_server: a ``dict`` of ``TokenData`` objects + indexed by their ``resource_server``. + """ + to_write = self._load() + + # create the namespace if it does not exist + if self.namespace not in to_write["data"]: + to_write["data"][self.namespace] = {} + + # add token data by resource server to namespaced data + for resource_server, token_data in token_data_by_resource_server.items(): + to_write["data"][self.namespace][resource_server] = token_data.to_dict() + + # update globus-sdk version + to_write["globus-sdk.version"] = __version__ + + # write the file, denying rwx to Group and World, exec to User + with self.user_only_umask(): + with open(self.filename, "w", encoding="utf-8") as f: + json.dump(to_write, f) + + def get_token_data_by_resource_server(self) -> dict[str, TokenData]: + """ + Lookup all token data under the current namespace from the JSON file. + + Returns a dict of ``TokenData`` objects indexed by their resource server. + """ + # TODO: when the Globus SDK drops support for py3.6 and py3.7, we can update + # `_load` to return a TypedDict which guarantees the response is a dict + # see: https://www.python.org/dev/peps/pep-0589/ + ret = {} + dicts_by_resource_server = t.cast( + t.Dict[str, t.Any], self._load()["data"].get(self.namespace, {}) + ) + for resource_server, token_data_dict in dicts_by_resource_server.items(): + ret[resource_server] = TokenData.from_dict(token_data_dict) + return ret + + def remove_token_data(self, resource_server: str) -> bool: + """ + Remove all tokens for a resource server from the JSON data, then overwrite + ``self.filename``. + + Returns True if token data was removed, False if none was found to remove. + + :param resource_server: The resource server string to remove tokens for + """ + to_write = self._load() + + # pop the token data out if it exists + popped = to_write["data"].get(self.namespace, {}).pop(resource_server, None) + + # overwrite the file, denying rwx to Group and World, exec to User + with self.user_only_umask(): + with open(self.filename, "w", encoding="utf-8") as f: + json.dump(to_write, f) + + return popped is not None diff --git a/src/globus_sdk/experimental/tokenstorage/memory.py b/src/globus_sdk/experimental/tokenstorage/memory.py new file mode 100644 index 000000000..23407ecae --- /dev/null +++ b/src/globus_sdk/experimental/tokenstorage/memory.py @@ -0,0 +1,39 @@ +from __future__ import annotations + +import typing as t + +from globus_sdk.experimental.tokenstorage.base import TokenStorage + +from .token_data import TokenData + + +class MemoryTokenStorage(TokenStorage): + """ + A token storage adapter which stores token data in process memory. + + Tokens are lost when the process exits. 
+ """ + + def __init__(self, *, namespace: str = "DEFAULT") -> None: + self._tokens: dict[str, dict[str, t.Any]] = {} + super().__init__(namespace=namespace) + + def store_token_data_by_resource_server( + self, token_data_by_resource_server: dict[str, TokenData] + ) -> None: + if self.namespace not in self._tokens: + self._tokens[self.namespace] = {} + + for resource_server, token_data in token_data_by_resource_server.items(): + self._tokens[self.namespace][resource_server] = token_data.to_dict() + + def get_token_data_by_resource_server(self) -> dict[str, TokenData]: + ret = {} + dicts_by_resource_server = self._tokens.get(self.namespace, {}) + for resource_server, token_data_dict in dicts_by_resource_server.items(): + ret[resource_server] = TokenData.from_dict(token_data_dict) + return ret + + def remove_token_data(self, resource_server: str) -> bool: + popped = self._tokens.get(self.namespace, {}).pop(resource_server, None) + return popped is not None diff --git a/src/globus_sdk/experimental/tokenstorage/sqlite.py b/src/globus_sdk/experimental/tokenstorage/sqlite.py new file mode 100644 index 000000000..660bd889a --- /dev/null +++ b/src/globus_sdk/experimental/tokenstorage/sqlite.py @@ -0,0 +1,246 @@ +from __future__ import annotations + +import json +import pathlib +import sqlite3 +import textwrap +import typing as t + +from globus_sdk.experimental.tokenstorage.base import FileTokenStorage +from globus_sdk.version import __version__ + +from .token_data import TokenData + + +class SQLiteTokenStorage(FileTokenStorage): + """ + A storage adapter for storing token data in sqlite databases. + + SQLite adapters are for more complex cases, where there may be multiple users or + "profiles" in play, and additionally a dynamic set of resource servers which need to + be stored in an extensible way. + + The ``connect_params`` is an optional dictionary whose elements are passed directly + to the underlying ``sqlite3.connect()`` method, enabling developers to fine-tune the + connection to the SQLite database. Refer to the ``sqlite3.connect()`` + documentation for SQLite-specific parameters. + """ + + def __init__( + self, + dbname: pathlib.Path | str, + *, + connect_params: dict[str, t.Any] | None = None, + namespace: str = "DEFAULT", + ): + """ + :param dbname: The name of the DB file to write to and read from. If the string + ":memory:" is used, an in-memory database will be used instead. + :param connect_params: A pass-through dictionary for fine-tuning the SQLite + connection. + :param namespace: A user-supplied namespace for partitioning token data. 
+ """ + self.filename = self.dbname = str(dbname) + self._connection = self._init_and_connect(connect_params) + super().__init__(namespace=namespace) + + def _is_memory_db(self) -> bool: + return self.dbname == ":memory:" + + def _init_and_connect( + self, + connect_params: dict[str, t.Any] | None, + ) -> sqlite3.Connection: + init_tables = self._is_memory_db() or not self.file_exists() + connect_params = connect_params or {} + if init_tables and not self._is_memory_db(): # real file needs to be created + with self.user_only_umask(): + conn = sqlite3.connect(self.dbname, **connect_params) + else: + conn = sqlite3.connect(self.dbname, **connect_params) + if init_tables: + conn.executescript( + textwrap.dedent( + """ + CREATE TABLE config_storage ( + namespace VARCHAR NOT NULL, + config_name VARCHAR NOT NULL, + config_data_json VARCHAR NOT NULL, + PRIMARY KEY (namespace, config_name) + ); + CREATE TABLE token_storage ( + namespace VARCHAR NOT NULL, + resource_server VARCHAR NOT NULL, + token_data_json VARCHAR NOT NULL, + PRIMARY KEY (namespace, resource_server) + ); + CREATE TABLE sdk_storage_adapter_internal ( + attribute VARCHAR NOT NULL, + value VARCHAR NOT NULL, + PRIMARY KEY (attribute) + ); + """ + ) + ) + # mark the version which was used to create the DB + # also mark the "database schema version" in case we ever need to handle + # graceful upgrades + conn.executemany( + "INSERT INTO sdk_storage_adapter_internal(attribute, value) " + "VALUES (?, ?)", + [ + ("globus-sdk.version", __version__), + ("globus-sdk.database_schema_version", "1"), + ], + ) + conn.commit() + return conn + + def close(self) -> None: + """ + Close the underlying database connection. + """ + self._connection.close() + + def store_config( + self, config_name: str, config_dict: t.Mapping[str, t.Any] + ) -> None: + """ + Store a config dict under the current namespace in the config table. + Allows arbitrary configuration data to be namespaced under the namespace, so + that application config may be associated with the stored token data. + + Uses sqlite "REPLACE" to perform the operation. + + :param config_name: A string name for the configuration value + :param config_dict: A dict of config which will be stored serialized as JSON + """ + self._connection.execute( + "REPLACE INTO config_storage(namespace, config_name, config_data_json) " + "VALUES (?, ?, ?)", + (self.namespace, config_name, json.dumps(config_dict)), + ) + self._connection.commit() + + def read_config(self, config_name: str) -> dict[str, t.Any] | None: + """ + Load a config dict under the current namespace in the config table. + If no value is found, returns None + + :param config_name: A string name for the configuration value + """ + row = self._connection.execute( + "SELECT config_data_json FROM config_storage " + "WHERE namespace=? AND config_name=?", + (self.namespace, config_name), + ).fetchone() + + if row is None: + return None + config_data_json = row[0] + val = json.loads(config_data_json) + if not isinstance(val, dict): + raise ValueError("reading config data and got non-dict result") + return val + + def remove_config(self, config_name: str) -> bool: + """ + Delete a previously stored configuration value. + + Returns True if data was deleted, False if none was found to delete. + + :param config_name: A string name for the configuration value + """ + rowcount = self._connection.execute( + "DELETE FROM config_storage WHERE namespace=? 
AND config_name=?", + (self.namespace, config_name), + ).rowcount + self._connection.commit() + return rowcount != 0 + + def store_token_data_by_resource_server( + self, token_data_by_resource_server: dict[str, TokenData] + ) -> None: + """ + Given a dict of token data indexed by resource server, convert the data into + JSON dicts and write it to ``self.dbname`` under the current namespace + + :param token_data_by_resource_server: a ``dict`` of ``TokenData`` objects + indexed by their ``resource_server``. + """ + pairs = [] + for resource_server, token_data in token_data_by_resource_server.items(): + pairs.append((resource_server, token_data.to_dict())) + + self._connection.executemany( + "REPLACE INTO token_storage(namespace, resource_server, token_data_json) " + "VALUES(?, ?, ?)", + [ + (self.namespace, rs_name, json.dumps(token_data_dict)) + for (rs_name, token_data_dict) in pairs + ], + ) + self._connection.commit() + + def get_token_data_by_resource_server(self) -> dict[str, TokenData]: + """ + Lookup all token data under the current namespace from the database. + + Returns a dict of ``TokenData`` objects indexed by their resource server. + """ + ret: dict[str, TokenData] = {} + for row in self._connection.execute( + "SELECT resource_server, token_data_json " + "FROM token_storage WHERE namespace=?", + (self.namespace,), + ): + resource_server, token_data_json = row + token_data_dict = json.loads(token_data_json) + ret[resource_server] = TokenData.from_dict(token_data_dict) + return ret + + def remove_token_data(self, resource_server: str) -> bool: + """ + Given a resource server to target, delete token data for that resource server + from the database (limited to the current namespace). + You can use this as part of a logout command implementation, loading token data + as a dict, and then deleting the data for each resource server. + + Returns True if token data was deleted, False if none was found to delete. + + :param resource_server: The name of the resource server to remove from the DB + """ + rowcount = self._connection.execute( + "DELETE FROM token_storage WHERE namespace=? AND resource_server=?", + (self.namespace, resource_server), + ).rowcount + self._connection.commit() + return rowcount != 0 + + def iter_namespaces( + self, *, include_config_namespaces: bool = False + ) -> t.Iterator[str]: + """ + Iterate over the namespaces which are in use in this storage adapter's database. + The presence of tokens for a namespace does not indicate that those tokens are + valid, only that they have been stored and have not been removed. + + :param include_config_namespaces: Include namespaces which appear only in the + configuration storage section of the sqlite database. 
By default, only
+            namespaces which were used for token storage will be returned.
+        """
+        seen: set[str] = set()
+        for row in self._connection.execute(
+            "SELECT DISTINCT namespace FROM token_storage;"
+        ):
+            namespace = row[0]
+            seen.add(namespace)
+            yield namespace
+
+        if include_config_namespaces:
+            for row in self._connection.execute(
+                "SELECT DISTINCT namespace FROM config_storage;"
+            ):
+                namespace = row[0]
+                if namespace not in seen:
+                    yield namespace
diff --git a/src/globus_sdk/experimental/tokenstorage/token_data.py b/src/globus_sdk/experimental/tokenstorage/token_data.py
new file mode 100644
index 000000000..904ac95f6
--- /dev/null
+++ b/src/globus_sdk/experimental/tokenstorage/token_data.py
@@ -0,0 +1,62 @@
+from __future__ import annotations
+
+import typing as t
+
+from globus_sdk.experimental.auth_requirements_error import _serializable, _validators
+
+
+class TokenData(_serializable.Serializable):
+    """
+    Data class containing tokens and metadata for a specific resource server,
+    used as the Python interface for ``TokenStorage``.
+    Contains the following attributes:
+
+    resource_server: str
+        The name of the resource server this token data is valid for.
+
+    identity_id: str | None
+        A UUID string for the user this token data was granted to. This value may
+        be None if the original token grant did not include the "openid" scope.
+
+    scope: str
+        A space separated list of scopes these tokens provide access to.
+
+    access_token: str
+        An access token that can be used for authentication with Globus APIs.
+
+    refresh_token: str | None
+        A refresh token that can be used for refresh token grants. This value may be
+        None if the original token grant did not allow for refresh tokens.
+
+    expires_at_seconds: int
+        A POSIX timestamp for the time when access_token expires.
+
+    token_type: str | None
+        The token type of access_token; currently this will always be "Bearer"
+        if present.
+
+    extra: dict | None
+        A dictionary of additional fields that were provided. May be used for
+        forward/backward compatibility.
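+
+    A construction sketch (every value below is a placeholder)::
+
+        token_data = TokenData(
+            resource_server="rs1",
+            identity_id=None,
+            scope="scope1",
+            access_token="...",
+            refresh_token=None,
+            expires_at_seconds=1715000000,
+            token_type="Bearer",
+        )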
+ """ + + def __init__( + self, + resource_server: str, + identity_id: str | None, + scope: str, + access_token: str, + refresh_token: str | None, + expires_at_seconds: int, + token_type: str | None, + extra: dict[str, t.Any] | None = None, + ): + self.resource_server = _validators.str_("resource_server", resource_server) + self.identity_id = _validators.opt_str("identity_id", identity_id) + self.scope = _validators.str_("scope", scope) + self.access_token = _validators.str_("access_token", access_token) + self.refresh_token = _validators.opt_str("refresh_token", refresh_token) + self.expires_at_seconds = _validators.int_( + "expires_at_seconds", expires_at_seconds + ) + self.token_type = _validators.opt_str("token_type", token_type) + self.extra = extra or {} diff --git a/tests/functional/tokenstorage_v2/conftest.py b/tests/functional/tokenstorage_v2/conftest.py new file mode 100644 index 000000000..9cc63b6fe --- /dev/null +++ b/tests/functional/tokenstorage_v2/conftest.py @@ -0,0 +1,68 @@ +import shutil +import tempfile +import time +from unittest import mock + +import pytest + +from globus_sdk.experimental.tokenstorage import TokenData + + +@pytest.fixture +def tempdir(): + d = tempfile.mkdtemp() + yield d + shutil.rmtree(d) + + +@pytest.fixture +def mock_token_data_by_resource_server(): + expiration_time = int(time.time()) + 3600 + ret = { + "resource_server_1": TokenData( + resource_server="resource_server_1", + identity_id="user_id", + scope="scope1", + access_token="access_token_1", + refresh_token="refresh_token_1", + expires_at_seconds=expiration_time, + token_type="Bearer", + ), + "resource_server_2": TokenData( + resource_server="resource_server_2", + identity_id="user_id", + scope="scope2 scope2:0 scope2:1", + access_token="access_token_2", + refresh_token="refresh_token_2", + expires_at_seconds=expiration_time, + token_type="Bearer", + ), + } + return ret + + +@pytest.fixture +def mock_response(): + res = mock.Mock() + expiration_time = int(time.time()) + 3600 + res.by_resource_server = { + "resource_server_1": { + "access_token": "access_token_1", + "expires_at_seconds": expiration_time, + "refresh_token": "refresh_token_1", + "resource_server": "resource_server_1", + "scope": "scope1", + "token_type": "Bearer", + }, + "resource_server_2": { + "access_token": "access_token_2", + "expires_at_seconds": expiration_time, + "refresh_token": "refresh_token_2", + "resource_server": "resource_server_2", + "scope": "scope2 scope2:0 scope2:1", + "token_type": "Bearer", + }, + } + res.decode_id_token.return_value = {"sub": "user_id"} + + return res diff --git a/tests/functional/tokenstorage_v2/test_json_token_storage.py b/tests/functional/tokenstorage_v2/test_json_token_storage.py new file mode 100644 index 000000000..0de5e7157 --- /dev/null +++ b/tests/functional/tokenstorage_v2/test_json_token_storage.py @@ -0,0 +1,109 @@ +import json +import os + +import pytest + +from globus_sdk.experimental.tokenstorage import JSONTokenStorage +from globus_sdk.tokenstorage import SimpleJSONFileAdapter +from globus_sdk.version import __version__ + +IS_WINDOWS = os.name == "nt" + + +@pytest.fixture +def filename(tempdir): + return os.path.join(tempdir, "mydata.json") + + +def test_file_does_not_exist(filename): + adapter = JSONTokenStorage(filename) + assert not adapter.file_exists() + + +def test_file_exists(filename): + open(filename, "w").close() # open and close to touch + adapter = JSONTokenStorage(filename) + assert adapter.file_exists() + + +def 
test_store_and_get_token_data_by_resource_server( + filename, mock_token_data_by_resource_server +): + adapter = JSONTokenStorage(filename) + assert not adapter.file_exists() + adapter.store_token_data_by_resource_server(mock_token_data_by_resource_server) + + gotten = adapter.get_token_data_by_resource_server() + + for resource_server in ["resource_server_1", "resource_server_2"]: + assert ( + mock_token_data_by_resource_server[resource_server].to_dict() + == gotten[resource_server].to_dict() + ) + + +def test_store_token_response_with_namespace(filename, mock_response): + adapter = JSONTokenStorage(filename, namespace="foo") + assert not adapter.file_exists() + adapter.store_token_response(mock_response) + + with open(filename) as f: + data = json.load(f) + assert data["globus-sdk.version"] == __version__ + assert data["data"]["foo"]["resource_server_1"]["access_token"] == "access_token_1" + assert data["data"]["foo"]["resource_server_2"]["access_token"] == "access_token_2" + + +def test_get_token_data(filename, mock_response): + adapter = JSONTokenStorage(filename) + assert not adapter.file_exists() + adapter.store_token_response(mock_response) + + assert adapter.get_token_data("resource_server_1").access_token == "access_token_1" + + +def test_remove_token_data(filename, mock_response): + adapter = JSONTokenStorage(filename) + assert not adapter.file_exists() + adapter.store_token_response(mock_response) + + # remove rs1, confirm only rs2 is still available + remove_result = adapter.remove_token_data("resource_server_1") + assert remove_result is True + + assert adapter.get_token_data("resource_server_1") is None + assert adapter.get_token_data("resource_server_2").access_token == "access_token_2" + + # confirm unable to re-remove rs1 + remove_result = adapter.remove_token_data("resource_server_1") + assert remove_result is False + + +@pytest.mark.xfail(IS_WINDOWS, reason="cannot set umask perms on Windows") +def test_store_perms(filename, mock_response): + adapter = JSONTokenStorage(filename) + assert not adapter.file_exists() + adapter.store_token_response(mock_response) + + # mode|0600 should be 0600 -- meaning that those are the maximal + # permissions given + st_mode = os.stat(filename).st_mode & 0o777 # & 777 to remove extra bits + assert st_mode | 0o600 == 0o600 + + +def test_migrate_from_simple(filename, mock_response): + # write with a SimpleJSONFileAdapter + old_adapter = SimpleJSONFileAdapter(filename) + old_adapter.store(mock_response) + + # confirm able to read with JSONTokenStorage + new_adapter = JSONTokenStorage(filename) + assert ( + new_adapter.get_token_data("resource_server_1").access_token == "access_token_1" + ) + + # confirm version is overwritten on next store + new_adapter.store_token_response(mock_response) + with open(filename) as f: + data = json.load(f) + assert data["format_version"] == "2.0" diff --git a/tests/functional/tokenstorage_v2/test_memory_token_storage.py b/tests/functional/tokenstorage_v2/test_memory_token_storage.py new file mode 100644 index 000000000..f149be171 --- /dev/null +++ b/tests/functional/tokenstorage_v2/test_memory_token_storage.py @@ -0,0 +1,52 @@ +from globus_sdk.experimental.tokenstorage import MemoryTokenStorage + + +def test_store_and_get_token_data_by_resource_server( + mock_token_data_by_resource_server, +): + adapter = MemoryTokenStorage() + adapter.store_token_data_by_resource_server(mock_token_data_by_resource_server) + + gotten = adapter.get_token_data_by_resource_server() + + for resource_server in ["resource_server_1", 
"resource_server_2"]: + assert ( + mock_token_data_by_resource_server[resource_server].to_dict() + == gotten[resource_server].to_dict() + ) + + +def test_store_token_response_with_namespace(mock_response): + adapter = MemoryTokenStorage(namespace="foo") + adapter.store_token_response(mock_response) + + assert ( + adapter._tokens["foo"]["resource_server_1"]["access_token"] == "access_token_1" + ) + assert ( + adapter._tokens["foo"]["resource_server_2"]["access_token"] == "access_token_2" + ) + + +def test_get_token_data(mock_response): + adapter = MemoryTokenStorage() + adapter.store_token_response(mock_response) + + assert adapter.get_token_data("resource_server_1").access_token == "access_token_1" + assert adapter.get_token_data("resource_server_2").access_token == "access_token_2" + + +def test_remove_token_data(mock_response): + adapter = MemoryTokenStorage() + adapter.store_token_response(mock_response) + + # remove rs1, confirm only rs2 is still available + remove_result = adapter.remove_token_data("resource_server_1") + assert remove_result is True + + assert adapter.get_token_data("resource_server_1") is None + assert adapter.get_token_data("resource_server_2").access_token == "access_token_2" + + # confirm unable to re-remove rs1 + remove_result = adapter.remove_token_data("resource_server_1") + assert remove_result is False diff --git a/tests/functional/tokenstorage_v2/test_sqlite_token_storage.py b/tests/functional/tokenstorage_v2/test_sqlite_token_storage.py new file mode 100644 index 000000000..89b7c48ee --- /dev/null +++ b/tests/functional/tokenstorage_v2/test_sqlite_token_storage.py @@ -0,0 +1,194 @@ +import os + +import pytest + +from globus_sdk.experimental.tokenstorage import SQLiteTokenStorage +from globus_sdk.tokenstorage import SQLiteAdapter + + +@pytest.fixture +def db_filename(tempdir): + return os.path.join(tempdir, "test.db") + + +MEMORY_DBNAME = ":memory:" + + +@pytest.fixture +def adapters_to_close(): + data = set() + yield data + for x in data: + x.close() + + +@pytest.fixture +def make_adapter(adapters_to_close): + def func(*args, **kwargs): + ret = SQLiteTokenStorage(*args, **kwargs) + adapters_to_close.add(ret) + return ret + + return func + + +@pytest.mark.parametrize( + "success, use_file, kwargs", + [ + (False, False, {}), + (False, False, {"namespace": "foo"}), + (True, False, {"dbname": MEMORY_DBNAME}), + (True, False, {"dbname": MEMORY_DBNAME, "namespace": "foo"}), + (True, True, {}), + (True, True, {"namespace": "foo"}), + (False, True, {"dbname": MEMORY_DBNAME}), + (False, True, {"dbname": MEMORY_DBNAME, "namespace": "foo"}), + ], +) +def test_constructor(success, use_file, kwargs, db_filename, make_adapter): + if success: + if use_file: + make_adapter(db_filename, **kwargs) + else: + make_adapter(**kwargs) + else: + with pytest.raises(TypeError): + if use_file: + make_adapter(db_filename, **kwargs) + else: + make_adapter(**kwargs) + + +def test_store_and_retrieve_simple_config(make_adapter): + adapter = make_adapter(MEMORY_DBNAME) + store_val = {"val1": True, "val2": None, "val3": 1.4} + adapter.store_config("myconf", store_val) + read_val = adapter.read_config("myconf") + assert read_val == store_val + assert read_val is not store_val + + +def test_store_and_get_token_data_by_resource_server( + mock_token_data_by_resource_server, make_adapter +): + adapter = make_adapter(MEMORY_DBNAME) + adapter.store_token_data_by_resource_server(mock_token_data_by_resource_server) + + gotten = adapter.get_token_data_by_resource_server() + + for resource_server in 
["resource_server_1", "resource_server_2"]: + assert ( + mock_token_data_by_resource_server[resource_server].to_dict() + == gotten[resource_server].to_dict() + ) + + +def test_multiple_adapters_store_and_retrieve(mock_response, db_filename, make_adapter): + adapter1 = make_adapter(db_filename) + adapter2 = make_adapter(db_filename) + adapter1.store_token_response(mock_response) + + assert adapter2.get_token_data("resource_server_1").access_token == "access_token_1" + assert adapter2.get_token_data("resource_server_2").access_token == "access_token_2" + + +def test_multiple_adapters_store_and_retrieve_different_namespaces( + mock_response, db_filename, make_adapter +): + adapter1 = make_adapter(db_filename, namespace="foo") + adapter2 = make_adapter(db_filename, namespace="bar") + adapter1.store_token_response(mock_response) + + data = adapter2.get_token_data_by_resource_server() + assert data == {} + + +def test_load_missing_config_data(make_adapter): + adapter = make_adapter(MEMORY_DBNAME) + assert adapter.read_config("foo") is None + + +def test_load_missing_token_data(make_adapter): + adapter = make_adapter(MEMORY_DBNAME) + assert adapter.get_token_data_by_resource_server() == {} + assert adapter.get_token_data("resource_server_1") is None + + +def test_remove_tokens(mock_response, make_adapter): + adapter = make_adapter(MEMORY_DBNAME) + adapter.store_token_response(mock_response) + + removed = adapter.remove_token_data("resource_server_1") + assert removed + assert adapter.get_token_data("resource_server_1") is None + assert adapter.get_token_data("resource_server_2").access_token == "access_token_2" + + removed = adapter.remove_token_data("resource_server_1") + assert not removed + + +def test_remove_config(make_adapter): + adapter = make_adapter(MEMORY_DBNAME) + store_val = {"val1": True, "val2": None, "val3": 1.4} + adapter.store_config("myconf", store_val) + adapter.store_config("myconf2", store_val) + removed = adapter.remove_config("myconf") + assert removed + read_val = adapter.read_config("myconf") + assert read_val is None + read_val = adapter.read_config("myconf2") + assert read_val == store_val + + removed = adapter.remove_config("myconf") + assert not removed + + +def test_iter_namespaces(mock_response, db_filename, make_adapter): + foo_adapter = make_adapter(db_filename, namespace="foo") + bar_adapter = make_adapter(db_filename, namespace="bar") + baz_adapter = make_adapter(db_filename, namespace="baz") + + for adapter in [foo_adapter, bar_adapter, baz_adapter]: + assert list(adapter.iter_namespaces()) == [] + assert list(adapter.iter_namespaces(include_config_namespaces=True)) == [] + + foo_adapter.store_token_response(mock_response) + + for adapter in [foo_adapter, bar_adapter, baz_adapter]: + assert list(adapter.iter_namespaces()) == ["foo"] + assert list(adapter.iter_namespaces(include_config_namespaces=True)) == ["foo"] + + bar_adapter.store_token_response(mock_response) + + for adapter in [foo_adapter, bar_adapter, baz_adapter]: + assert set(adapter.iter_namespaces()) == {"foo", "bar"} + assert set(adapter.iter_namespaces(include_config_namespaces=True)) == { + "foo", + "bar", + } + + baz_adapter.store_config("some_conf", {}) + + for adapter in [foo_adapter, bar_adapter, baz_adapter]: + assert set(adapter.iter_namespaces()) == {"foo", "bar"} + assert set(adapter.iter_namespaces(include_config_namespaces=True)) == { + "foo", + "bar", + "baz", + } + + +def test_backwards_compatible_storage(mock_response, db_filename, make_adapter): + # store data with SQLiteAdapter + 
old_adapter = SQLiteAdapter(db_filename) + old_adapter.store(mock_response) + old_adapter.close() + + # retrieve data with SQLiteTokenStorage using the same file + new_adapter = make_adapter(db_filename) + assert ( + new_adapter.get_token_data("resource_server_1").access_token == "access_token_1" + ) + assert ( + new_adapter.get_token_data("resource_server_2").access_token == "access_token_2" + ) diff --git a/tests/unit/experimental/globus_app/test_validating_storage_adapter.py b/tests/unit/experimental/globus_app/test_validating_token_storage.py similarity index 76% rename from tests/unit/experimental/globus_app/test_validating_storage_adapter.py rename to tests/unit/experimental/globus_app/test_validating_token_storage.py index 579c30a4e..9c62704f4 100644 --- a/tests/unit/experimental/globus_app/test_validating_storage_adapter.py +++ b/tests/unit/experimental/globus_app/test_validating_token_storage.py @@ -10,40 +10,40 @@ import globus_sdk from globus_sdk import MISSING, MissingType, OAuthTokenResponse, Scope from globus_sdk.experimental.consents import ConsentForest -from globus_sdk.experimental.globus_app import ValidatingStorageAdapter +from globus_sdk.experimental.globus_app import ValidatingTokenStorage from globus_sdk.experimental.globus_app.errors import ( IdentityMismatchError, MissingIdentityError, UnmetScopeRequirementsError, ) -from globus_sdk.tokenstorage import MemoryAdapter +from globus_sdk.experimental.tokenstorage import MemoryTokenStorage from tests.common import make_consent_forest -def test_validating_storage_adapter_evaluates_identity_requirements( +def test_validating_token_storage_evaluates_identity_requirements( make_token_response, ): id_a, id_b = str(uuid.uuid4()), str(uuid.uuid4()) - adapter = ValidatingStorageAdapter(MemoryAdapter(), {}) + adapter = ValidatingTokenStorage(MemoryTokenStorage(), {}) # Seed the adapter with an initial identity. assert adapter.identity_id is None - adapter.store(make_token_response(identity_id=id_a)) + adapter.store_token_response(make_token_response(identity_id=id_a)) assert adapter.identity_id == id_a # We should be able to store a token with the same identity. - adapter.store(make_token_response(identity_id=id_a)) + adapter.store_token_response(make_token_response(identity_id=id_a)) # We should not be able to store a token with a different identity. 
with pytest.raises(IdentityMismatchError): - adapter.store(make_token_response(identity_id=id_b)) + adapter.store_token_response(make_token_response(identity_id=id_b)) -def test_validating_storage_adapter_evaluates_root_scope_requirements( +def test_validating_token_storage_evaluates_root_scope_requirements( make_token_response, ): - adapter = ValidatingStorageAdapter( - MemoryAdapter(), {"rs1": [Scope.deserialize("scope1")]} + adapter = ValidatingTokenStorage( + MemoryTokenStorage(), {"rs1": [Scope.deserialize("scope1")]} ) identity_id = str(uuid.uuid4()) valid_token_response = make_token_response( @@ -53,21 +53,22 @@ def test_validating_storage_adapter_evaluates_root_scope_requirements( scopes={"rs1": "scope2"}, identity_id=identity_id ) - adapter.store(valid_token_response) + adapter.store_token_response(valid_token_response) with pytest.raises(UnmetScopeRequirementsError): - adapter.store(invalid_token_response) + adapter.store_token_response(invalid_token_response) assert ( - adapter.get_token_data("rs1") == valid_token_response.by_resource_server["rs1"] + adapter.get_token_data("rs1").access_token + == valid_token_response.by_resource_server["rs1"]["access_token"] ) -def test_validating_storage_adapter_evaluates_dependent_scope_requirements( +def test_validating_token_storage_evaluates_dependent_scope_requirements( make_token_response, consent_client, ): - adapter = ValidatingStorageAdapter( - MemoryAdapter(), + adapter = ValidatingTokenStorage( + MemoryTokenStorage(), {"rs1": [Scope.deserialize("scope[subscope]")]}, consent_client=consent_client, ) @@ -75,44 +76,47 @@ def test_validating_storage_adapter_evaluates_dependent_scope_requirements( consent_client.mocked_forest = make_consent_forest("scope[different_subscope]") with pytest.raises(UnmetScopeRequirementsError): - adapter.store(token_response) + adapter.store_token_response(token_response) consent_client.mocked_forest = make_consent_forest("scope[subscope]") - adapter.store(token_response) + adapter.store_token_response(token_response) - assert adapter.get_token_data("rs1") == token_response.by_resource_server["rs1"] + assert ( + adapter.get_token_data("rs1").access_token + == token_response.by_resource_server["rs1"]["access_token"] + ) -def test_validating_storage_adapter_fails_non_identifiable_responses( +def test_validating_token_storage_fails_non_identifiable_responses( make_token_response, ): - adapter = ValidatingStorageAdapter(MemoryAdapter(), {}) + adapter = ValidatingTokenStorage(MemoryTokenStorage(), {}) token_response = make_token_response(identity_id=None) with pytest.raises(MissingIdentityError): - adapter.store(token_response) + adapter.store_token_response(token_response) -def test_validating_storage_adapter_loads_identity_info_from_storage( +def test_validating_token_storage_loads_identity_info_from_storage( make_token_response, ): # Create an in memory storage adapter - storage = MemoryAdapter() - adapter = ValidatingStorageAdapter(storage, {}) + storage = MemoryTokenStorage() + adapter = ValidatingTokenStorage(storage, {}) # Store an identifiable token response identity_id = str(uuid.uuid4()) token_response = make_token_response(identity_id=identity_id) - adapter.store(token_response) + adapter.store_token_response(token_response) # Create a net new adapter, pointing at the same storage. - new_adapter = ValidatingStorageAdapter(storage, {}) + new_adapter = ValidatingTokenStorage(storage, {}) # Verify that the new adapter loads the identity info from storage. 
assert new_adapter.identity_id == identity_id -def test_validating_storage_adapter_returns_none_when_no_token_data(): - adapter = ValidatingStorageAdapter(MemoryAdapter(), {}) +def test_validating_token_storage_returns_none_when_no_token_data(): + adapter = ValidatingTokenStorage(MemoryTokenStorage(), {}) assert adapter.get_token_data("rs1") is None