RF: remove datalad_deprecated dependency #119

Merged · 3 commits · Aug 4, 2022
7 changes: 4 additions & 3 deletions datalad_crawler/pipelines/openfmri.py
@@ -11,6 +11,8 @@
import os
from os.path import lexists

from datalad.downloaders.s3 import S3Authenticator

# Import necessary nodes
from ..nodes.crawl_url import crawl_url
from ..nodes.matches import a_href_match
@@ -26,7 +28,6 @@

# For S3 crawling
from .openfmri_s3 import pipeline as s3_pipeline
from datalad.api import ls
from datalad.dochelpers import exc_str

# Possibly instantiate a logger if you would like to log
@@ -123,9 +124,9 @@ def pipeline(dataset,
# assert suf in 'AB'
# s3_prefix = 'ds017' + suf

openfmri_s3_prefix = 's3://openneuro/'
try:
if not ls('%s%s' % (openfmri_s3_prefix, s3_prefix)):
bucket = S3Authenticator().authenticate("openneuro", None)
if not next(iter(bucket.list(s3_prefix, "/"))):
s3_prefix = None # not there
except Exception as exc:
lgr.warning(
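The openfmri change above drops the deprecated `datalad.api.ls` call (which now lives in `datalad_deprecated`) in favour of listing the bucket directly through `S3Authenticator`. Below is a minimal standalone sketch of the same existence check, assuming (as the diff does) that `S3Authenticator.authenticate(bucket_name, None)` returns a boto bucket whose `.list(prefix, delimiter)` lazily yields keys; the helper name `s3_prefix_exists` is hypothetical, not part of the PR:

```python
import logging

from datalad.downloaders.s3 import S3Authenticator

lgr = logging.getLogger("datalad.crawler.pipelines.openfmri")


def s3_prefix_exists(bucket_name, prefix):
    """Return True if any key exists under ``prefix`` in ``bucket_name``.

    Anonymous access (credential=None) suffices for public buckets such as
    openneuro.
    """
    try:
        # authenticate() hands back a boto bucket object for the named bucket
        bucket = S3Authenticator().authenticate(bucket_name, None)
        # bucket.list() yields keys lazily; any() stops at the first match
        return any(True for _ in bucket.list(prefix, "/"))
    except Exception as exc:
        lgr.warning("Could not check %s in %s: %s", prefix, bucket_name, exc)
        return False
```

Compared to the diff, `any()` sidesteps the `StopIteration` that `next(iter(...))` raises on an empty listing, which in the pipeline would be swallowed by the surrounding `except` and only produce a warning.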
26 changes: 3 additions & 23 deletions datalad_crawler/pipelines/xnat.py
@@ -16,32 +16,10 @@
it is
"""

import os
import re
import json
from os.path import lexists

# Import necessary nodes
from ..nodes.crawl_url import crawl_url
from ..nodes.matches import css_match, a_href_match
from ..nodes.misc import assign
from ..nodes.misc import sub
from ..nodes.misc import switch
from ..nodes.misc import func_to_node
from ..nodes.misc import find_files
from ..nodes.misc import skip_if
from ..nodes.misc import debug
from ..nodes.misc import fix_permissions
from ..nodes.annex import Annexificator
from datalad.utils import updated
from datalad.consts import ARCHIVES_SPECIAL_REMOTE, DATALAD_SPECIAL_REMOTE
from datalad.downloaders.providers import Providers

# For S3 crawling
from ..nodes.s3 import crawl_s3
from .openfmri_s3 import pipeline as s3_pipeline
from datalad.api import ls
from datalad.dochelpers import exc_str

# Possibly instantiate a logger if you would like to log
# during pipeline creation
@@ -51,9 +29,11 @@
from datalad.tests.utils_pytest import eq_
from datalad.utils import assure_list, assure_bool


def list_to_dict(l, field):
return {r.pop(field): r for r in l}


DEFAULT_RESULT_FIELDS = {'totalrecords', 'result'}
PROJECT_ACCESS_TYPES = {'public', 'protected', 'private'}

@@ -144,7 +124,7 @@ def __call__(self, query,

def get_projects(self, limit=None, drop_empty=True, asdict=True):
"""Get list of projects

Parameters
----------
limit: {'public', 'protected', 'private', None} or a list thereof
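The `list_to_dict` helper retained in the second xnat.py hunk re-keys a list of records by one field, popping that field out of each record in the process (so the input dicts are mutated). A small usage sketch with made-up project records:

```python
def list_to_dict(l, field):
    return {r.pop(field): r for r in l}


# Hypothetical records, shaped like rows of an XNAT "projects" query result
records = [
    {"ID": "proj1", "name": "Project One"},
    {"ID": "proj2", "name": "Project Two"},
]

by_id = list_to_dict(records, "ID")
assert by_id == {
    "proj1": {"name": "Project One"},
    "proj2": {"name": "Project Two"},
}
```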
1 change: 0 additions & 1 deletion setup.py
@@ -38,7 +38,6 @@ def findsome(subdir, extensions):
requires = {
'core': [
'datalad>=0.14.0',
'datalad_deprecated',
'scrapy>=1.1.0', # versioning is primarily for python3 support
],
'devel-docs': [