
Commit 64af46c

Fix new B018 errors raised by ruff 0.0.260
1 parent 2c5f896 commit 64af46c
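
ruff 0.0.260 extended the flake8-bugbear B018 ("useless expression") check to cover statements such as bare attribute accesses, which is what surfaced the new errors fixed here: B018 flags any statement whose value is computed and then discarded. A minimal sketch of the forms it reports (hypothetical code, not from this commit):

    x = [1, 2, 3]
    x           # B018: bare name, value discarded
    1 / 0       # B018: constant expression (it raises, but the value is unused)
    x.append    # B018: attribute access, value discarded (note: never called)

Where the flagged access is intentional, usually because reading a property triggers a side effect such as populating a cache, this commit keeps the statement and appends # noqa: B018; where it was genuinely dead code or a missing print()/assignment, the statement is removed or fixed.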

15 files changed, +23 -30 lines changed

lib/galaxy/jobs/__init__.py

+3 -3

@@ -1257,7 +1257,7 @@ def get_special():
             self.app.interactivetool_manager.create_interactivetool(job, self.tool, self.interactivetools)

         # Ensure galaxy_lib_dir is set in case there are any later chdirs
-        self.galaxy_lib_dir
+        self.galaxy_lib_dir  # noqa: B018
         if self.tool.requires_galaxy_python_environment or self.remote_command_line:
             # These tools (upload, metadata, data_source) may need access to the datatypes registry.
             self.app.datatypes_registry.to_xml_file(os.path.join(self.working_directory, "registry.xml"))
@@ -1387,7 +1387,7 @@ def fail(
         # self.get_destination_configuration() below accesses self.job_destination and will just cause
         # JobMappingException to be raised again.
         try:
-            self.job_destination
+            self.job_destination  # noqa: B018
         except JobMappingException as exc:
             log.debug(
                 "(%s) fail(): Job destination raised JobMappingException('%s'), caching fake '__fail__' "
@@ -2602,7 +2602,7 @@ def prepare(self, compute_environment=None):
         self.extra_filenames.extend(extra_filenames)

         # Ensure galaxy_lib_dir is set in case there are any later chdirs
-        self.galaxy_lib_dir
+        self.galaxy_lib_dir  # noqa: B018

         # We need command_line persisted to the db in order for Galaxy to re-queue the job
         # if the server was stopped and restarted before the job finished

lib/galaxy/jobs/runners/pbs.py

+1 -1

@@ -104,7 +104,7 @@ def __init__(self, app, nworkers):

        # Set the default server during startup
        self.__default_pbs_server = None
-       self.default_pbs_server  # this is a method with a property decorator, so this causes the default server to be set
+       self.default_pbs_server  # noqa: B018 this is a method with a property decorator, so this causes the default server to be set

        # Proceed with general initialization
        super().__init__(app, nworkers)
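
The line kept above is the canonical case for suppressing B018: default_pbs_server is a property whose getter lazily determines and caches the server, so the bare read is really an initialization call. A minimal sketch of the pattern (simplified, hypothetical names; the real getter queries pbs):

    class Runner:
        def __init__(self):
            self.__server = None
            self.default_pbs_server  # noqa: B018 -- getter fills the cache

        @property
        def default_pbs_server(self):
            if self.__server is None:
                self.__server = "pbs.example.org"  # placeholder for the pbs lookup
            return self.__server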

lib/galaxy/tools/cache.py

+2 -2

@@ -146,7 +146,7 @@ def __init__(self):
     def assert_hashes_initialized(self):
         if not self._hashes_initialized:
             for tool_hash in self._hash_by_tool_paths.values():
-                tool_hash.hash
+                tool_hash.hash  # noqa: B018
             self._hashes_initialized = True

     def cleanup(self):
@@ -261,7 +261,7 @@ def __init__(self, path, modtime=None, lazy_hash=False):
         self.modtime = modtime or os.path.getmtime(path)
         self._tool_hash = None
         if not lazy_hash:
-            self.hash
+            self.hash  # noqa: B018

     @property
     def hash(self):

lib/galaxy/tools/parameters/dynamic_options.py

+1 -1

@@ -570,7 +570,7 @@ def load_from_parameter(from_parameter, transform_lines=None):
         self._tool_data_table = None
         self.elem = elem
         self.column_elem = elem.find("column")
-        self.tool_data_table  # Need to touch tool data table once to populate self.columns
+        self.tool_data_table  # noqa: B018 Need to touch tool data table once to populate self.columns

         # Options are defined by parsing tabular text data from a data file
         # on disk, a dataset, or the value of another parameter

lib/galaxy/workflow/run_request.py

+2 -3

@@ -502,9 +502,8 @@ def add_parameter(name: str, value: str, type: WorkflowRequestInputParameter.types):
        workflow_invocation.step_states.append(step_state)

    if step.type == "subworkflow":
-       step.workflow_outputs
-       assert step.subworkflow
-       subworkflow: Workflow = step.subworkflow
+       subworkflow = step.subworkflow
+       assert subworkflow
        effective_outputs: Optional[List[EffectiveOutput]] = None
        if run_config.preferred_intermediate_object_store_id or run_config.preferred_outputs_object_store_id:
            step_outputs = step.workflow_outputs

lib/galaxy_test/base/decorators.py

+1 -3

@@ -50,9 +50,7 @@ def using_requirement(tag: KnownRequirementT):

 def _attach_requirements(method, tag: KnownRequirementT):
     requirement = f"requires_{tag}"
-    try:
-        method.__required_galaxy_features
-    except AttributeError:
+    if not hasattr(method, "__required_galaxy_features"):
         method.__required_galaxy_features = []
     method.__required_galaxy_features.append(tag)
     getattr(pytest.mark, requirement)(method)
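
The hunk above swaps a try/except AttributeError probe for hasattr(), removing the bare attribute access B018 flags without changing behavior: hasattr() returns False exactly when the access would raise AttributeError. A small equivalence sketch (hypothetical object and attribute):

    class Box:
        pass

    box = Box()

    # Before: the access exists only to test for the attribute.
    try:
        box.items  # noqa: B018
    except AttributeError:
        box.items = []

    # After: same check, no discarded expression.
    if not hasattr(box, "items"):
        box.items = []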

lib/tool_shed/util/repository_content_util.py

-1

@@ -30,7 +30,6 @@ def upload_tar(
        message = "{} Invalid paths were: {}".format(" ".join(check_results.errors), ", ".join(check_results.invalid))
        return False, message, [], "", undesirable_dirs_removed, undesirable_files_removed
    else:
-       repository.hg_repo
        repo_dir = repository.repo_path(trans.app)
        if upload_point is not None:
            full_path = os.path.abspath(os.path.join(repo_dir, upload_point))

scripts/microbes/harvest_bacteria.py

+2 -2

@@ -137,7 +137,7 @@ def scrape_ftp(ftp_contents, org_dir, org_num, refseq, ftp_url):
        results = None
        time.sleep(1)  # Throttle Connection
    if results is None:
-       "URL COMPLETELY FAILED TO LOAD:", url
+       print("URL COMPLETELY FAILED TO LOAD:", url)
        return

    # do special processing for each file type:
@@ -246,7 +246,7 @@ def __main__():
    # get ftp contents
    ftp_contents = get_ftp_contents(ftp_url)
    if ftp_contents is None:
-       "FTP COMPLETELY FAILED TO LOAD", "org:", org_num, "ftp:", ftp_url
+       print("FTP COMPLETELY FAILED TO LOAD", "org:", org_num, "ftp:", ftp_url)
    else:
        for refseq in chroms:
            scrape_ftp(ftp_contents, org_dir, org_num, refseq, ftp_url)
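
Unlike the noqa cases, these two hunks are real bugs that B018 uncovered: a comma-separated list of expressions at statement level just builds a tuple and throws it away, so these error messages were never printed. In miniature:

    url = "ftp://example.org/genomes/org1"  # hypothetical value
    "URL COMPLETELY FAILED TO LOAD:", url          # builds a tuple, discards it silently
    print("URL COMPLETELY FAILED TO LOAD:", url)   # actually emits the message

The broken lines read like Python 2 print statements with the print dropped, which is likely how the bug crept in.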

test/unit/app/tools/test_toolbox.py

+3 -3

@@ -384,14 +384,14 @@ def test_writes_integrate_tool_panel(self):
         self._add_config("""<toolbox><tool file="tool.xml" /></toolbox>""")

         self.assert_integerated_tool_panel(exists=False)
-        self.toolbox
+        self.toolbox  # noqa: B018
         self.assert_integerated_tool_panel(exists=True)

     def test_groups_tools_in_section(self):
         self._init_tool()
         self._setup_two_versions_in_config(section=True)
         self._setup_two_versions()
-        self.toolbox
+        self.toolbox  # noqa: B018
         self.__verify_two_test_tools()

         # Assert only newer version of the tool loaded into the panel.
@@ -602,7 +602,7 @@ def __verify_get_tool_for_default_lineage(self):
     def __setup_shed_tool_conf(self):
         self._add_config("""<toolbox tool_path="."></toolbox>""")

-        self.toolbox  # create toolbox
+        self.toolbox  # noqa: B018 create toolbox
         assert not self.reindexed

         os.remove(self.integrated_tool_panel_path)

test/unit/app/visualizations/plugins/test_VisualizationsRegistry.py

-1

@@ -131,7 +131,6 @@ def test_plugin_load(self):
         assert not vis2.serves_templates

         mock_app_dir.remove()
-        template_cache_dir

     def test_script_entry(self):
         """"""

test/unit/data/test_dataset_materialization.py

+2 -2

@@ -320,8 +320,8 @@ def _ensure_relations_attached_and_expunge(deferred_hda: HistoryDatasetAssociation):
    # make sure everything needed is in session (sources, hashes, and metadata)...
    # point here is exercise deferred_hda.history throws a detached error.
    [s.hashes for s in deferred_hda.dataset.sources]
-   deferred_hda.dataset.hashes
-   deferred_hda._metadata
+   deferred_hda.dataset.hashes  # noqa: B018
+   deferred_hda._metadata  # noqa: B018
    sa_session = fixture_context.sa_session
    sa_session.expunge_all()

test/unit/data/test_galaxy_mapping.py

+1 -1

@@ -753,7 +753,7 @@ def test_flush_refreshes(self):
         assert "id" in inspect(galaxy_model_object_new).unloaded

         # Verify a targeted flush prevent expiring unrelated objects.
-        galaxy_model_object_new.id
+        galaxy_model_object_new.id  # noqa: B018
         assert "id" not in inspect(galaxy_model_object_new).unloaded
         session.flush(model.GalaxySession())
         assert "id" not in inspect(galaxy_model_object_new).unloaded

test/unit/util/test_unittest.py

+2 -2

@@ -19,8 +19,8 @@ def test_setUp(self):

     def test_assertRaises(self):
         with self.assertRaises(ZeroDivisionError):
-            1 / 0
+            1 / 0  # noqa: B018

     def test_assertRaisesRegex(self):
         with self.assertRaisesRegex(ZeroDivisionError, "^division .* zero"):
-            1 / 0
+            1 / 0  # noqa: B018

tools/data_source/upload.py

+1 -3

@@ -123,9 +123,7 @@ def add_file(dataset, registry, output_path: str) -> Dict[str, str]:
    # auto_decompress is a request flag that can be swapped off to prevent Galaxy from automatically
    # decompressing archive files before sniffing.
    auto_decompress = dataset.get("auto_decompress", True)
-   try:
-       dataset.file_type
-   except AttributeError:
+   if not hasattr(dataset, "file_type"):
        raise UploadProblemException("Unable to process uploaded file, missing file_type parameter.")

    if dataset.type == "url":

tools/filters/sff_extract.py

+2 -2

@@ -47,7 +47,7 @@
 linkerlengths = {}  # type: ignore

 # set to true if something really fishy is going on with the sequences
-stern_warning = True
+stern_warning = False


 def read_bin_fragment(struct_def, fileh, offset=0, data=None, byte_padding=None):
@@ -983,7 +983,7 @@ def check_for_dubious_startseq(seqcheckstore, sffname, seqdata):
    for shortseq, count in seqdict.items():
        if float(count) / len(seqcheckstore) >= 0.5:
            foundinloop = True
-           stern_warning
+           stern_warning = True
            foundproblem = "\n" + "*" * 80
            foundproblem += "\nWARNING: "
            foundproblem += "weird sequences in file " + sffname + "\n\n"
