-
Notifications
You must be signed in to change notification settings - Fork 36
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
- Loading branch information
Showing
12 changed files
with
1,095 additions
and
0 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,7 @@ | ||
Added | ||
~~~~~ | ||
|
||
- Added ``TokenStorage`` to experimental along with ``FileTokenStorage``,
  ``JSONTokenStorage``, ``MemoryTokenStorage``, and ``SQLiteTokenStorage``, which
  implement it. ``TokenStorage`` expands the functionality of ``StorageAdapter``
  but is not fully backwards compatible. (:pr:`NUMBER`)
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,14 @@ | ||
from globus_sdk.experimental.tokenstorage_v2.base import FileTokenStorage, TokenStorage | ||
from globus_sdk.experimental.tokenstorage_v2.json import JSONTokenStorage | ||
from globus_sdk.experimental.tokenstorage_v2.memory import MemoryTokenStorage | ||
from globus_sdk.experimental.tokenstorage_v2.sqlite import SQLiteTokenStorage | ||
from globus_sdk.experimental.tokenstorage_v2.token_data import TokenData | ||
|
||
# public API of this package, sorted alphabetically for easy scanning
__all__ = (
    "FileTokenStorage",
    "JSONTokenStorage",
    "MemoryTokenStorage",
    "SQLiteTokenStorage",
    "TokenData",
    "TokenStorage",
)
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,139 @@ | ||
from __future__ import annotations | ||
|
||
import abc | ||
import contextlib | ||
import os | ||
import typing as t | ||
|
||
from globus_sdk.services.auth import OAuthTokenResponse | ||
|
||
from .token_data import TokenData | ||
|
||
|
||
class TokenStorage(metaclass=abc.ABCMeta):
    """
    Abstract base class for interacting with an underlying storage system to
    manage storage of token data.

    The ``namespace`` is a user-supplied way of partitioning data, and any token
    response data passed to the storage adapter are stored indexed by
    *resource_server*. If you have a more complex use-case in which this scheme
    will be insufficient, you should encode that in your choice of ``namespace``
    values.
    """

    def __init__(self, namespace: str = "DEFAULT") -> None:
        """
        :param namespace: A user-supplied namespace for partitioning token data.
        """
        self.namespace = namespace

    @abc.abstractmethod
    def store_token_data_by_resource_server(
        self, token_data_by_resource_server: dict[str, TokenData]
    ) -> None:
        """
        Store token data in underlying storage partitioned by the resource
        server and the current namespace.

        :param token_data_by_resource_server: a ``dict`` of ``TokenData``
            objects indexed by their ``resource_server``.
        """

    @abc.abstractmethod
    def get_token_data_by_resource_server(self) -> dict[str, TokenData]:
        """
        Lookup all token data under the current namespace in the underlying
        storage.

        Returns a dict of ``TokenData`` objects indexed by their resource
        server.
        """

    def get_token_data(self, resource_server: str) -> TokenData | None:
        """
        Lookup token data for a resource server in the underlying storage
        under the current namespace.

        Either returns a ``TokenData`` object containing tokens and metadata
        for the given resource server or ``None`` indicating that there was no
        data for that resource server.

        :param resource_server: The resource_server string to get token data for
        """
        return self.get_token_data_by_resource_server().get(resource_server)

    @abc.abstractmethod
    def remove_token_data(self, resource_server: str) -> bool:
        """
        Remove all token data for a resource server from the underlying storage
        under the current namespace.

        Returns True if token data was deleted, False if none was found to
        delete.

        :param resource_server: The resource server string to remove token
            data for
        """

    def store_response(self, token_response: OAuthTokenResponse) -> None:
        """
        Wrapper around ``store_token_data_by_resource_server`` that accepts an
        ``OAuthTokenResponse``.

        :param token_response: An ``OAuthTokenResponse`` from an authentication
            flow
        """
        # extract identity_id from the id_token claims when one is present;
        # not every token response carries an id_token
        if token_response.get("id_token"):
            decoded_id_token = token_response.decode_id_token()
            identity_id = decoded_id_token["sub"]
        else:
            identity_id = None

        token_data_by_resource_server = {
            resource_server: TokenData(
                resource_server=token_dict["resource_server"],
                identity_id=identity_id,
                scope=token_dict["scope"],
                access_token=token_dict["access_token"],
                refresh_token=token_dict.get("refresh_token"),
                expires_at_seconds=token_dict["expires_at_seconds"],
                token_type=token_dict.get("token_type"),
            )
            for resource_server, token_dict in (
                token_response.by_resource_server.items()
            )
        }
        self.store_token_data_by_resource_server(token_data_by_resource_server)
|
||
|
||
class FileTokenStorage(TokenStorage, metaclass=abc.ABCMeta):
    """
    Intermediate base class for token storages backed by a local file.

    File adapters are for single-user cases, where we can assume that there's a
    simple file-per-user and users are only ever attempting to read their own
    files.
    """

    # path of the backing file; concrete subclasses must set this
    filename: str

    def file_exists(self) -> bool:
        """
        Check if the file used by this file storage adapter exists.
        """
        return os.path.exists(self.filename)

    @contextlib.contextmanager
    def user_only_umask(self) -> t.Iterator[None]:
        """
        A context manager to deny rwx to Group and World, x to User

        This does not create a file, but ensures that if a file is created
        while in the context manager, its permissions will be correct on unix
        systems.

        .. note::

            On Windows, this has no effect. To control the permissions on
            files used for token storage, use ``%LOCALAPPDATA%`` or
            ``%APPDATA%``. These directories should only be accessible to the
            current user.
        """
        previous_umask = os.umask(0o177)
        try:
            yield
        finally:
            # always restore the process-wide umask, even on error
            os.umask(previous_umask)
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,159 @@ | ||
from __future__ import annotations | ||
|
||
import json | ||
import pathlib | ||
import typing as t | ||
|
||
from globus_sdk.experimental.tokenstorage_v2.base import FileTokenStorage | ||
from globus_sdk.version import __version__ | ||
|
||
from .token_data import TokenData | ||
|
||
|
||
class JSONTokenStorage(FileTokenStorage):
    """
    A storage adapter for storing token data in JSON files.
    """

    # the version for the current data format used by the file adapter.
    # 1.0 was used by ``SimpleJSONFileAdapter``. If ``JSONTokenStorage`` is
    # pointed at storage used by a ``SimpleJSONFileAdapter`` it will be
    # converted to 2.0 and no longer usable by ``SimpleJSONFileAdapter``.
    format_version = "2.0"

    # the supported versions (data not in these versions causes an error)
    supported_versions = ("1.0", "2.0")

    def __init__(self, filename: pathlib.Path | str, *, namespace: str = "DEFAULT"):
        """
        :param filename: the name of the file to write to and read from
        :param namespace: A user-supplied namespace for partitioning token data
        """
        self.filename = str(filename)
        super().__init__(namespace=namespace)

    def _raw_load(self) -> dict[str, t.Any]:
        """
        Load the file contents as JSON and return the resulting dict
        object. If a dict is not found, raises an error.

        :raises ValueError: if the file's top-level JSON value is not an object
        """
        with open(self.filename, encoding="utf-8") as f:
            val = json.load(f)
        if not isinstance(val, dict):
            raise ValueError("reading from json file got non-dict data")
        return val

    def _handle_formats(self, read_data: dict[str, t.Any]) -> dict[str, t.Any]:
        """Handle older data formats supported by this class

        if the data is not in a known/recognized format, this will error
        otherwise, reshape the data to the current supported format and return it

        :raises ValueError: if ``format_version`` is not a supported version
        """
        format_version = read_data.get("format_version")
        if format_version not in self.supported_versions:
            # fixed: error message previously named the old adapter class and
            # had an unclosed parenthesis
            raise ValueError(
                f"cannot store data using JSONTokenStorage({self.filename}) "
                "existing data file is in an unknown format "
                f"(format_version={format_version})"
            )

        # 1.0 data was stored under a "by_rs" key without namespaces, to upgrade
        # we move everything under the "DEFAULT" key and remove the "by_rs" key.
        if format_version == "1.0":
            read_data = {
                "data": {
                    "DEFAULT": read_data["by_rs"],
                },
                "format_version": self.format_version,
                "globus-sdk.version": __version__,
            }

        return read_data

    def _load(self) -> dict[str, t.Any]:
        """
        Load data from the file and ensure that the data is in a modern format
        which can be handled by the rest of the adapter.

        If the file is missing, this will return a "skeleton" for new data.
        """
        try:
            data = self._raw_load()
        except FileNotFoundError:
            return {
                "data": {},
                "format_version": self.format_version,
                "globus-sdk.version": __version__,
            }
        return self._handle_formats(data)

    def store_token_data_by_resource_server(
        self, token_data_by_resource_server: dict[str, TokenData]
    ) -> None:
        """
        Store token data as JSON data in ``self.filename`` under the current
        namespace

        Additionally will write the version of ``globus_sdk`` which was in use.

        Under the assumption that this may be running on a system with multiple
        local users, this sets the umask such that only the owner of the
        resulting file can read or write it.

        :param token_data_by_resource_server: a ``dict`` of ``TokenData``
            objects indexed by their ``resource_server``.
        """
        to_write = self._load()

        # create the namespace if it does not exist
        if self.namespace not in to_write["data"]:
            to_write["data"][self.namespace] = {}

        # add token data by resource server to namespaced data
        for resource_server, token_data in token_data_by_resource_server.items():
            to_write["data"][self.namespace][resource_server] = token_data.to_dict()

        # update globus-sdk version
        to_write["globus-sdk.version"] = __version__

        # write the file, denying rwx to Group and World, exec to User
        with self.user_only_umask():
            with open(self.filename, "w", encoding="utf-8") as f:
                json.dump(to_write, f)

    def get_token_data_by_resource_server(self) -> dict[str, TokenData]:
        """
        Lookup all token data under the current namespace from the JSON file.

        Returns a dict of ``TokenData`` objects indexed by their resource
        server. Returns an empty dict if the namespace has no stored data.
        """
        # TODO: when the Globus SDK drops support for py3.6 and py3.7, we can
        # update `_load` to return a TypedDict which guarantees the response
        # is a dict
        # see: https://www.python.org/dev/peps/pep-0589/
        ret = {}
        # fixed: default to {} so an unused namespace yields an empty dict
        # instead of raising AttributeError on None
        dicts_by_resource_server = t.cast(
            t.Dict[str, t.Any], self._load()["data"].get(self.namespace, {})
        )
        for resource_server, token_data_dict in dicts_by_resource_server.items():
            ret[resource_server] = TokenData.from_dict(token_data_dict)
        return ret

    def remove_token_data(self, resource_server: str) -> bool:
        """
        Remove all tokens for a resource server from the JSON data, then
        overwrite ``self.filename``.

        Returns True if token data was removed, False if none was found to
        remove.

        :param resource_server: The resource server string to remove tokens for
        """
        to_write = self._load()

        # pop the token data out if it exists
        popped = to_write["data"].get(self.namespace, {}).pop(resource_server, None)

        # overwrite the file, denying rwx to Group and World, exec to User
        with self.user_only_umask():
            with open(self.filename, "w", encoding="utf-8") as f:
                json.dump(to_write, f)

        return popped is not None
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,39 @@ | ||
from __future__ import annotations | ||
|
||
import typing as t | ||
|
||
from globus_sdk.experimental.tokenstorage_v2.base import TokenStorage | ||
|
||
from .token_data import TokenData | ||
|
||
|
||
class MemoryTokenStorage(TokenStorage):
    """
    A token storage adapter which stores token data in process memory.

    Tokens are lost when the process exits.
    """

    def __init__(self, *, namespace: str = "DEFAULT") -> None:
        # mapping of namespace -> resource_server -> token data dict
        self._tokens: dict[str, dict[str, t.Any]] = {}
        super().__init__(namespace=namespace)

    def store_token_data_by_resource_server(
        self, token_data_by_resource_server: dict[str, TokenData]
    ) -> None:
        # create the namespace bucket on first use, then merge in the new data
        namespace_data = self._tokens.setdefault(self.namespace, {})
        for resource_server, token_data in token_data_by_resource_server.items():
            namespace_data[resource_server] = token_data.to_dict()

    def get_token_data_by_resource_server(self) -> dict[str, TokenData]:
        stored = self._tokens.get(self.namespace, {})
        return {
            resource_server: TokenData.from_dict(token_data_dict)
            for resource_server, token_data_dict in stored.items()
        }

    def remove_token_data(self, resource_server: str) -> bool:
        # pop returns None when the resource server had no stored data
        removed = self._tokens.get(self.namespace, {}).pop(resource_server, None)
        return removed is not None
Oops, something went wrong.