STYLE: Invoke pyupgrade --py37-plus on Python files
Leengit authored and hjmjohnson committed Mar 2, 2022
Commit 0fbf00a (1 parent: 15aa8d5)
Showing 28 changed files with 109 additions and 126 deletions.
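The rewrites in this commit were produced by running pyupgrade with its --py37-plus flag over the repository's Python files, which modernizes syntax to the Python 3.7+ baseline; the exact command line is not recorded in the commit. The short sketch below (illustrative names and values only, not taken from the ITK sources) demonstrates the main categories of rewrite visible in the hunks that follow: implicit {} placeholders or f-strings instead of indexed {0}/{1} placeholders, set literals instead of set((...)) calls, the io.open alias and redundant "r" mode dropped from open() calls, and removal of from __future__ imports that are no-ops on Python 3.

# Sketch of the rewrite categories applied by pyupgrade --py37-plus.
# All names and values here are illustrative, not from the ITK sources.
import io

seconds, units = 1.25, 8

# Indexed placeholders {0}/{1} become implicit {} placeholders (or f-strings
# where the conversion is unambiguous); the formatted output is identical.
old_style = "CPU filter took {0} seconds with {1} work units.".format(seconds, units)
new_style = "CPU filter took {} seconds with {} work units.".format(seconds, units)
assert old_style == new_style == f"CPU filter took {seconds} seconds with {units} work units."

# set((...)) calls become set literals.
assert set(("itkPyBuffer.h", "itkPyVnl.h")) == {"itkPyBuffer.h", "itkPyVnl.h"}

# io.open is the same object as the builtin open, and "r" is the default
# mode, so io.open(path, "r") collapses to open(path).
assert io.open is open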
@@ -70,7 +70,7 @@
cpu_timer.Stop()

print(
"CPU NeighborhoodFilter took {0} seconds with {1} work units.\n".format(
"CPU NeighborhoodFilter took {} seconds with {} work units.\n".format(
cpu_timer.GetMean(), cpu_filter.GetNumberOfWorkUnits()
)
)
@@ -75,7 +75,7 @@
cpu_timer.Stop()

print(
"CPU NeighborhoodFilter took {0} seconds with {1} work units.\n".format(
"CPU NeighborhoodFilter took {} seconds with {} work units.\n".format(
cpu_timer.GetMean(), cpu_filter.GetNumberOfWorkUnits()
)
)
@@ -60,7 +60,7 @@
cpu_timer.Stop()

print(
"CPU MeanFilter took {0} seconds with {1} work units.\n".format(
"CPU MeanFilter took {} seconds with {} work units.\n".format(
cpu_timer.GetMean(), cpu_filter.GetNumberOfWorkUnits()
)
)
16 changes: 8 additions & 8 deletions Utilities/Doxygen/mcdoc.py
@@ -33,7 +33,7 @@ def usage():

def setGroup(fname, group):
# sys.stderr.write("Processing "+ fname +"\n")
f = io.open(fname, "r", encoding="utf-8")
f = open(fname, "r", encoding="utf-8")
out = io.StringIO()
# load everything in memory
fcontent = f.read()
@@ -46,16 +46,16 @@ def setGroup(fname, group):
last = m.end(1)
dcontent = m.group(1)
# we don't care about doxygen fields not about a class
if r"\class" in dcontent and dcontent != " \class classname ":
if r"\class" in dcontent and dcontent != r" \class classname ":
# do we have a line with the expected content?
if re.search(r"\ingroup .*" + group + "(\s|$)", dcontent, re.MULTILINE):
if re.search(r"\ingroup .*" + group + r"(\s|$)", dcontent, re.MULTILINE):
# yes - just keep the content unchanged
out.write(dcontent)
else:
# add the expected group
if "\n" in dcontent:
# this is a multiline content. Find the indent
indent = re.search("( *)(\*|$)", dcontent).group(1)
indent = re.search(r"( *)(\*|$)", dcontent).group(1)
lastLine = dcontent.splitlines()[-1]
if re.match(r"^ *$", lastLine):
out.write(dcontent + "* \\ingroup " + group + "\n" + indent)
@@ -75,14 +75,14 @@ def setGroup(fname, group):
out.write(dcontent)
out.write(fcontent[last:])
# we can save the content to the original file
f = io.open(fname, "w", encoding="utf-8")
f = open(fname, "w", encoding="utf-8")
f.write(out.getvalue())
f.close()


def checkGroup(fname, group):
# sys.stderr.write("Checking"+ fname + "\n")
f = io.open(fname, "r", encoding="utf-8")
f = open(fname, "r", encoding="utf-8")
# load everything in memory
fcontent = f.read()
f.close()
@@ -91,10 +91,10 @@ def checkGroup(fname, group):
for m in re.finditer(r"/\*\*(.*?)\*/", fcontent, re.DOTALL):
dcontent = m.group(1)
# we don't care about doxygen fields not about a class
if r"\class" in dcontent and dcontent != " \class classname ":
if r"\class" in dcontent and dcontent != r" \class classname ":
# do we have a line with the expected content?
if not re.search(
r"\\ingroup .*" + group + "(\s|$)", dcontent, re.MULTILINE
r"\\ingroup .*" + group + r"(\s|$)", dcontent, re.MULTILINE
):
# get class name and the line for debug output
cname = re.search(r"\\class +([^ ]*)", dcontent).group(1).strip()
2 changes: 1 addition & 1 deletion Utilities/ITKv5Preparation/replaceClassWithTypename.py
@@ -29,7 +29,7 @@ def replaceOneInstance(desired_outs):

for fileName in sys.argv[1:]:
print(f"Processing {fileName}")
filePtr = open(fileName, "r")
filePtr = open(fileName)
fileOrignalText = filePtr.read()
filePtr.close()

3 changes: 1 addition & 2 deletions Utilities/Maintenance/ArchiveTestingDataOnAzure.py
@@ -1,6 +1,5 @@
#!/usr/bin/env python

from __future__ import print_function

description = """
Upload all the ExternalData files to Azure Blob storage.
@@ -31,7 +30,7 @@ def upload_to_azure(
):
# get the MD5 checksum
print("Uploading " + content_link + " ...")
with open(content_link, "r") as fp:
with open(content_link) as fp:
md5hash = fp.readline().strip()
print("Checksum: " + md5hash)

2 changes: 1 addition & 1 deletion Utilities/Maintenance/ArchiveTestingDataOnGirder.py
@@ -101,7 +101,7 @@ def _uploadContentLinkItem(
+ " Items can only be added to folders."
)
else:
with open(content_link, "r") as fp:
with open(content_link) as fp:
hash_value = fp.readline().strip()
self._uploadAsItem(
name,
8 changes: 4 additions & 4 deletions Utilities/Maintenance/AuthorsChangesSince.py
@@ -311,9 +311,9 @@ def write_changelog(repo_name, commit_link_prefix, git_revision):

remote_old_tag = remote_tag(remote_spec)

remote_spec = subprocess.check_output(
"git show HEAD:{1}".format(revision, remote), shell=True
).decode("utf-8")
remote_spec = subprocess.check_output(f"git show HEAD:{remote}", shell=True).decode(
"utf-8"
)
remote_new_tag = remote_tag(remote_spec)
remote_repo = remote_repository(remote_spec)

@@ -328,7 +328,7 @@ def write_changelog(repo_name, commit_link_prefix, git_revision):

try:
log = subprocess.check_output(
"git shortlog --format=%s:%h --topo-order --no-merges {0}..{1}".format(
"git shortlog --format=%s:%h --topo-order --no-merges {}..{}".format(
remote_old_tag, remote_new_tag
),
shell=True,
51 changes: 24 additions & 27 deletions Utilities/Maintenance/BuildHeaderTest.py
@@ -18,7 +18,6 @@
#
# ==========================================================================*/

from __future__ import print_function

usage = """usage: BuildHeaderTest.py <module_name> <module_source_path> <module_binary_path> <maximum_number_of_headers>
@@ -31,32 +30,30 @@
"""

# Headers to not test because of dependecy issues, etc.
BANNED_HEADERS = set(
(
"itkDynamicLoader.h", # This cannot be included when ITK_DYNAMIC_LOADING is OFF
"itkExceptionObject.h", # There is a pre-processor check so people use itkMacro.h instead.
"itkFFTWForwardFFTImageFilter.h",
"itkFFTWInverseFFTImageFilter.h",
"itkFFTWRealToHalfHermitianForwardFFTImageFilter.h",
"itkFFTWHalfHermitianToRealInverseFFTImageFilter.h",
"itkFFTWComplexToComplexFFTImageFilter.h",
"itkFFTWCommon.h",
"itkPyBuffer.h", # needs Python.h, etc
"itkPyVnl.h", # needs Python.h, etc
"itkPyVectorContainer.h", # needs Python.h, etc
"itkVanHerkGilWermanErodeDilateImageFilter.h", # circular include's
"itkBSplineDeformableTransform.h", # deprecated
"vtkCaptureScreen.h", # these includes require VTK
"itkMultiThreader.h", # Compatibility file, it should not be used
"itkEnableIf.h", # Compatibility file, it should not be used
"itkIsSame.h", # Compatibility file, it should not be used
"itkIsBaseOf.h", # Compatibility file, it should not be used
"itkIsConvertible.h", # Compatibility file, it should not be used
"itkViewImage.h", # Depends on VTK_RENDERING_BACKEND
"QuickView.h", # Depends on VTK_RENDERING_BACKEND
"itkBSplineDeformableTransformInitializer.h",
)
)
BANNED_HEADERS = {
"itkDynamicLoader.h", # This cannot be included when ITK_DYNAMIC_LOADING is OFF
"itkExceptionObject.h", # There is a pre-processor check so people use itkMacro.h instead.
"itkFFTWForwardFFTImageFilter.h",
"itkFFTWInverseFFTImageFilter.h",
"itkFFTWRealToHalfHermitianForwardFFTImageFilter.h",
"itkFFTWHalfHermitianToRealInverseFFTImageFilter.h",
"itkFFTWComplexToComplexFFTImageFilter.h",
"itkFFTWCommon.h",
"itkPyBuffer.h", # needs Python.h, etc
"itkPyVnl.h", # needs Python.h, etc
"itkPyVectorContainer.h", # needs Python.h, etc
"itkVanHerkGilWermanErodeDilateImageFilter.h", # circular include's
"itkBSplineDeformableTransform.h", # deprecated
"vtkCaptureScreen.h", # these includes require VTK
"itkMultiThreader.h", # Compatibility file, it should not be used
"itkEnableIf.h", # Compatibility file, it should not be used
"itkIsSame.h", # Compatibility file, it should not be used
"itkIsBaseOf.h", # Compatibility file, it should not be used
"itkIsConvertible.h", # Compatibility file, it should not be used
"itkViewImage.h", # Depends on VTK_RENDERING_BACKEND
"QuickView.h", # Depends on VTK_RENDERING_BACKEND
"itkBSplineDeformableTransformInitializer.h",
}

HEADER = """/*=========================================================================
*
5 changes: 2 additions & 3 deletions Utilities/Maintenance/FindRedundantHeaderIncludes.py
@@ -25,7 +25,6 @@
## you can remove the sibling include.

## in the ITK/Code directory issue the following command
from __future__ import print_function

import os
import sys
@@ -73,7 +72,7 @@ def comment_out(self, filename, remove_header):
for line in ff:
if line.find(remove_header) != -1:
print(
" Removing {0} from {1}".format(
" Removing {} from {}".format(
line, self.filePathBaseDirs[filename] + "/" + filename
)
)
@@ -108,7 +107,7 @@ def proc_children(self, node, dupcandidate, starting_child):
for currEdge in nodeEdges:
if dupcandidate in myDependTree[currEdge]:
print(
"Remove {0} from {1}: found hereditary same include in {2}".format(
"Remove {} from {}: found hereditary same include in {}".format(
dupcandidate, starting_child, currEdge
)
)
1 change: 0 additions & 1 deletion Utilities/Maintenance/JREUpdate.py
@@ -14,7 +14,6 @@
In the future, the Python girder client can be used for automatic upload.
"""

from __future__ import print_function

import os
import subprocess
2 changes: 1 addition & 1 deletion Utilities/Maintenance/ParallelStripIncludes.py
@@ -37,7 +37,7 @@

def main():

fileList = open(relativeFileList, "r").read().splitlines()
fileList = open(relativeFileList).read().splitlines()

args = []
for i in xrange(0, len(fileList), FILES_PER_PROCESS):
5 changes: 2 additions & 3 deletions Utilities/Maintenance/StripIncludes.py
@@ -45,7 +45,6 @@
] # keep those headers
#######################################################################

from __future__ import print_function

import os

@@ -90,7 +89,7 @@ def checkIfDef(line, ifDefCounter):
def processFile(directory, fileName):

absFileName = "/".join([sourceDir, directory, fileName])
lines = open(absFileName, "r").read().splitlines()
lines = open(absFileName).read().splitlines()
removedLines = []
ifDefCounter = 0
for i, line in enumerate(lines):
@@ -150,7 +149,7 @@ def processFileList(fileList):

def main():

fileList = open(relativeFileList, "r").read().splitlines()
fileList = open(relativeFileList).read().splitlines()
processFileList(fileList)


11 changes: 6 additions & 5 deletions Utilities/Maintenance/UpdateCopyrightStatementsInITK.py
@@ -23,7 +23,6 @@
# This script is designed to help change the copyright notices in all ITK files to a common format.
# For files that are .h, .cxx, .hxx, .c, if there is no other copyright information, add the itkCopyright.

from __future__ import print_function

import re
import sys
@@ -62,15 +61,17 @@

## Patterns that match the old copyright notice sections
## ITK only copyright
ITKOnlyOldHeader = """ */\* *==.*Program:.*Insight Segmentation & Registration Toolkit.*Copyright .* Insight.*Consortium. All rights reserved.*See ITKCopyright.txt or https://www.itk.org/HTML/Copyright.htm for details.[\n\r ]*This software is distributed WITHOUT ANY WARRANTY; without even.*the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR.*PURPOSE. See the above copyright notices for more information.*=== *\*/[\n\r ]*"""
ITKOnlyOldHeader = """ */\\* *==.*Program:.*Insight Segmentation & Registration Toolkit.*Copyright .* Insight.*Consortium. All rights reserved.*See ITKCopyright.txt or https://www.itk.org/HTML/Copyright.htm for details.[\n\r ]*This software is distributed WITHOUT ANY WARRANTY; without even.*the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR.*PURPOSE. See the above copyright notices for more information.*=== *\\*/[\n\r ]*"""
ITKOnlyOldRE = re.compile(ITKOnlyOldHeader, re.MULTILINE | re.DOTALL | re.IGNORECASE)

## Files that originated in VTK, and now have ITK also
ITKVTKOldHeader = """ */\* *==.*Program:.*Insight Segmentation & Registration Toolkit.*Copyright .* Insight Software Consortium. All rights reserved.*See ITKCopyright.txt or https://www.itk.org/HTML/Copyright.htm for details.[\n\r ]*.*VTKCopyright.txt.*This software is distributed WITHOUT ANY WARRANTY; without even.*the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR.*PURPOSE. See the above copyright notices for more information.*=== *\*/[\n\r ]*"""
ITKVTKOldHeader = """ */\\* *==.*Program:.*Insight Segmentation & Registration Toolkit.*Copyright .* Insight Software Consortium. All rights reserved.*See ITKCopyright.txt or https://www.itk.org/HTML/Copyright.htm for details.[\n\r ]*.*VTKCopyright.txt.*This software is distributed WITHOUT ANY WARRANTY; without even.*the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR.*PURPOSE. See the above copyright notices for more information.*=== *\\*/[\n\r ]*"""
ITKVTKOldRE = re.compile(ITKVTKOldHeader, re.MULTILINE | re.DOTALL | re.IGNORECASE)

## Looking for new files.
NewITKHeader = """ */\* *==.*http://www.apache.org/licenses/LICENSE-2.0.txt.*=== *\*/"""
NewITKHeader = (
r""" */\* *==.*http://www.apache.org/licenses/LICENSE-2.0.txt.*=== *\*/"""
)
NewITKHeaderRE = re.compile(NewITKHeader, re.MULTILINE | re.DOTALL | re.IGNORECASE)

eolSpaceRemove = re.compile(r" *$", re.MULTILINE)
@@ -116,7 +117,7 @@
currFile = os.path.join(top, ff)
print(currFile)

infile = open(currFile, "r")
infile = open(currFile)
file_text = infile.read()
newstring = (
file_text # default output to input, just in case all search patterns fail
4 changes: 2 additions & 2 deletions Utilities/Maintenance/VCL_ModernizeNaming.py
@@ -11,7 +11,7 @@
from collections import OrderedDict

if len(sys.argv) != 2:
usage = """
usage = r"""
INCORRECT USAGE:
{0}
@@ -713,7 +713,7 @@
cfile = sys.argv[1]

file_as_string = ""
with open(cfile, "r") as rfp:
with open(cfile) as rfp:
file_as_string = rfp.read()
orig_file = file_as_string

2 changes: 1 addition & 1 deletion Utilities/Maintenance/VNL_ModernizeNaming.py
@@ -80,7 +80,7 @@
cfile = sys.argv[1]

file_as_string = ""
with open(cfile, "r") as rfp:
with open(cfile) as rfp:
original_string = rfp.read()
file_as_string = original_string

7 changes: 3 additions & 4 deletions Utilities/Maintenance/VerifyURLs.py
@@ -1,6 +1,5 @@
#!/usr/bin/env python

from __future__ import print_function

import sys
import re
@@ -9,13 +8,13 @@

# compile regular expression to pull out URLs in ITK
# The ignored characters an common delineators, and not strick to the standard
http_re = re.compile('(http://[^\s<>\{\}\|\]\[\)\("]*)')
http_re = re.compile(r'(http://[^\s<>\{\}\|\]\[\)\("]*)')
http_dict = dict()

for arg in sys.argv[1:]:
if not os.path.isfile(arg):
continue
f = open(arg, "r")
f = open(arg)
for l in f.readlines():
mo = http_re.search(l)
if mo is not None:
@@ -27,7 +26,7 @@
print("Found ", len(http_dict), " unique URLS.")

# compile regular expression to pull out the server address and path
server_re = re.compile("http://([^/]+)(/?[^\s]*)")
server_re = re.compile(r"http://([^/]+)(/?[^\s]*)")

for url, filename in http_dict.items():
mo = server_re.search(url)