Skip to content

Commit

Permalink
format
Browse files Browse the repository at this point in the history
  • Loading branch information
bdpedigo committed Feb 16, 2024
1 parent 6ec4a66 commit 5ad7634
Show file tree
Hide file tree
Showing 8 changed files with 176 additions and 84 deletions.
2 changes: 1 addition & 1 deletion caveclient/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,4 +2,4 @@

from .frameworkclient import CAVEclient

__all__ = ["CAVEclient"]
__all__ = ["CAVEclient"]
12 changes: 8 additions & 4 deletions caveclient/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -113,7 +113,9 @@ def _check_authorization_redirect(response):
)


def _api_versions(server_name, server_address, endpoints_common, auth_header, verify=True):
def _api_versions(
server_name, server_address, endpoints_common, auth_header, verify=True
):
"""Asks a server what API versions are available, if possible"""
url_mapping = {server_name: server_address}
url_base = endpoints_common.get("get_api_versions", None)
Expand All @@ -140,7 +142,11 @@ def _api_endpoints(
if api_version == "latest":
try:
avail_vs_server = _api_versions(
server_name, server_address, endpoints_common, auth_header, verify=verify
server_name,
server_address,
endpoints_common,
auth_header,
verify=verify,
)
avail_vs_server = set(avail_vs_server)
except:
Expand Down Expand Up @@ -241,7 +247,6 @@ def __init__(
pool_block=None,
over_client=None,
):

super(ClientBaseWithDataset, self).__init__(
server_address,
auth_header,
Expand Down Expand Up @@ -276,7 +281,6 @@ def __init__(
pool_block=None,
over_client=None,
):

super(ClientBaseWithDatastack, self).__init__(
server_address,
auth_header,
Expand Down
34 changes: 22 additions & 12 deletions caveclient/datastack_lookup.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,65 +2,76 @@
import json
from . import auth
import logging

logger = logging.getLogger(__name__)

DEFAULT_LOCATION = auth.default_token_location
DEFAULT_DATASTACK_FILE = 'cave_datastack_to_server_map.json'
DEFAULT_DATASTACK_FILE = "cave_datastack_to_server_map.json"


def read_map(filename=None):
    """Load the datastack-to-server map from a JSON cache file.

    Parameters
    ----------
    filename : str, optional
        Path to the cache file. Defaults to the standard file in the
        auth token directory (``DEFAULT_LOCATION``).

    Returns
    -------
    dict
        The cached mapping, or an empty dict if the file is missing,
        unreadable, or not valid JSON.
    """
    if filename is None:
        filename = os.path.join(DEFAULT_LOCATION, DEFAULT_DATASTACK_FILE)
    try:
        with open(os.path.expanduser(filename), "r") as f:
            return json.load(f)
    # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit are not
    # silently swallowed; a missing or corrupt cache still yields {}.
    except (OSError, json.JSONDecodeError):
        return {}


def is_writable(filename):
    """Return True if `filename` is writable or can be created.

    If the file already exists, writability is checked with ``os.access``.
    If it does not exist, the default cache directory is created as needed
    and the file is opened for writing — note this creates an empty file as
    a side effect, proving writability.
    """
    # Hoist the repeated expanduser call.
    path = os.path.expanduser(filename)
    if os.path.exists(path):
        # File exists but is not writeable
        if not os.access(path, os.W_OK):
            return False
    else:
        try:
            # File does not exist, so make the directories if possible.
            # exist_ok avoids the exists-then-makedirs race of the original.
            os.makedirs(os.path.expanduser(DEFAULT_LOCATION), exist_ok=True)
            with open(path, "w") as f:
                if not f.writable():
                    return False
        except OSError:  # IOError is an alias of OSError on Python 3
            return False
    return True

def write_map(data, filename=None):
    """Write the datastack-to-server map `data` to a JSON cache file.

    Parameters
    ----------
    data : dict
        Mapping of datastack names to server addresses.
    filename : str, optional
        Path to the cache file. Defaults to the standard file in the
        auth token directory (``DEFAULT_LOCATION``).

    Returns
    -------
    bool
        True if the cache was written, False if the target is not writable.
    """
    if filename is None:
        filename = os.path.join(DEFAULT_LOCATION, DEFAULT_DATASTACK_FILE)

    if is_writable(filename):
        with open(os.path.expanduser(filename), "w") as f:
            json.dump(data, f)
        return True
    else:
        # Use the module-level `logger` (as handle_server_address does);
        # `logging.warn` is deprecated and bypasses this module's logger.
        logger.warning(
            f"Did not write cache — file {os.path.expanduser(filename)} is not writeable"
        )
        return False


def handle_server_address(datastack, server_address, filename=None, write=False):
    """Resolve the server address to use for `datastack`.

    If `server_address` is given it wins; with ``write=True`` it is also
    persisted to the cache when it differs from the cached value. If no
    `server_address` is given, the cached value (or None) is returned.

    Note: the original span contained interleaved old/new diff lines; this
    is the reconstructed current-side implementation.
    """
    data = read_map(filename)
    if server_address is not None:
        if write and server_address != data.get(datastack):
            data[datastack] = server_address
            wrote = write_map(data, filename)
            # Only announce the new default if the cache write succeeded.
            if wrote:
                logger.warning(
                    f"Updated datastack-to-server cache — '{server_address}' will now be used by default for datastack '{datastack}'"
                )
        return server_address
    else:
        return data.get(datastack)


def get_datastack_cache(filename=None):
    """Return the full datastack-to-server cache as a dict (empty if the cache file is missing or unreadable)."""
    return read_map(filename)


def reset_server_address_cache(datastack, filename=None):
"""Remove one or more datastacks from the datastack-to-server cache.
Expand All @@ -78,4 +89,3 @@ def reset_server_address_cache(datastack, filename=None):
data.pop(ds, None)
logger.warning(f"Wiping '{ds}' from datastack-to-server cache")
write_map(data, filename)

6 changes: 3 additions & 3 deletions caveclient/endpoints.py
Original file line number Diff line number Diff line change
Expand Up @@ -273,6 +273,6 @@

# Neuroglancer deployment used when a datastack does not specify its own.
fallback_ngl_endpoint = "https://neuroglancer.neuvue.io/"

# Endpoint templates shared by all neuroglancer deployments.
ngl_endpoints_common = {
    "get_info": "{ngl_url}/version.json",
    "fallback_ngl_url": fallback_ngl_endpoint,
}
12 changes: 9 additions & 3 deletions caveclient/format_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,15 +11,17 @@ def format_precomputed_neuroglancer(objurl):
objurl_out = None
return objurl_out


def format_neuroglancer(objurl):
    """Format `objurl` for use in neuroglancer, dispatching on the URL scheme."""
    qry = urlparse(objurl)
    # Bug fix: the original condition `qry.scheme == "graphene" or "https"`
    # was always truthy ("https" is a non-empty string), so every URL was
    # routed to format_graphene. Compare the scheme against both values.
    if qry.scheme in ("graphene", "https"):
        return format_graphene(objurl)
    elif qry.scheme == "precomputed":
        return format_precomputed_neuroglancer(objurl)
    else:
        return format_raw(objurl)


def format_precomputed_https(objurl):
qry = urlparse(objurl)
if qry.scheme == "gs":
Expand All @@ -41,6 +43,7 @@ def format_graphene(objurl):
objurl_out = None
return objurl_out


def format_verbose_graphene(objurl):
qry = urlparse(objurl)
if qry.scheme == "http" or qry.scheme == "https":
Expand All @@ -49,6 +52,7 @@ def format_verbose_graphene(objurl):
objurl_out = f"graphene://middleauth+{qry.netloc}{qry.path}"
return objurl_out


def format_cloudvolume(objurl):
qry = urlparse(objurl)
if qry.scheme == "graphene":
Expand All @@ -58,14 +62,16 @@ def format_cloudvolume(objurl):
else:
return None


def format_raw(objurl):
    """Identity formatter: return `objurl` unchanged."""
    return objurl


def format_cave_explorer(objurl):
    """Format `objurl` for a cave-explorer deployment, dispatching on scheme.

    Returns None for schemes other than graphene/https/precomputed.
    """
    qry = urlparse(objurl)
    # Membership test is equivalent to the original double equality check
    # and matches the dispatch style used elsewhere in this module.
    if qry.scheme in ("graphene", "https"):
        return format_verbose_graphene(objurl)
    elif qry.scheme == "precomputed":
        return format_precomputed_neuroglancer(objurl)
    else:
        return None
Expand Down
4 changes: 3 additions & 1 deletion caveclient/materializationengine.py
Original file line number Diff line number Diff line change
Expand Up @@ -2369,7 +2369,9 @@ def query_view(
else:
return response.json()

def get_unique_string_values(self, table: str, datastack_name: Optional[str] = None):
def get_unique_string_values(
self, table: str, datastack_name: Optional[str] = None
):
"""Get unique string values for a table
Parameters
Expand Down
2 changes: 1 addition & 1 deletion caveclient/session_config.py
Original file line number Diff line number Diff line change
Expand Up @@ -46,4 +46,4 @@ def patch_session(
session.mount("http://", http)
session.mount("https://", http)

pass
pass
Loading

0 comments on commit 5ad7634

Please sign in to comment.