From e64c1c29369f1ccacc693c0f27b2396ccdc6bfcb Mon Sep 17 00:00:00 2001
From: Rosen Penev
Date: Sat, 7 Dec 2024 15:16:08 -0800
Subject: [PATCH 1/2] tools: convert strings to f-strings

Done with find tools -name "*.py" -exec flynt -tc -tj -a '{}' \;

Signed-off-by: Rosen Penev
---
 tools/create_release.py |  6 +++---
 tools/sanity_checks.py  | 10 +++++-----
 2 files changed, 8 insertions(+), 8 deletions(-)

diff --git a/tools/create_release.py b/tools/create_release.py
index 2c5bf7fe3..adc94dc4d 100755
--- a/tools/create_release.py
+++ b/tools/create_release.py
@@ -44,7 +44,7 @@ def __init__(self, repo: T.Optional[str], token: T.Optional[str], tag: str):
         self.create_wrap_file()
 
     def read_wrap(self):
-        filename = Path('subprojects', self.name + '.wrap')
+        filename = Path('subprojects', f'{self.name}.wrap')
         self.wrap = configparser.ConfigParser(interpolation=None)
         self.wrap.read(filename)
         self.wrap_section = self.wrap[self.wrap.sections()[0]]
@@ -86,7 +86,7 @@ def create_patch_zip(self):
         base_name = Path(self.tempdir, f'{self.tag}_patch')
         shutil.make_archive(base_name.as_posix(), 'zip', root_dir=self.tempdir, base_dir=directory)
 
-        patch_filename = base_name.with_name(base_name.name + '.zip')
+        patch_filename = base_name.with_name(f'{base_name.name}.zip')
         self.upload(patch_filename, 'application/zip')
 
         h = hashlib.sha256()
@@ -101,7 +101,7 @@ def create_patch_zip(self):
 
     def create_wrap_file(self):
         self.wrap_section['wrapdb_version'] = self.version
-        filename = Path(self.tempdir, self.name + '.wrap')
+        filename = Path(self.tempdir, f'{self.name}.wrap')
 
         # configparser write() adds multiple trailing newlines, collapse them
         buf = io.StringIO()
diff --git a/tools/sanity_checks.py b/tools/sanity_checks.py
index 4992b7b9e..ce86f4439 100755
--- a/tools/sanity_checks.py
+++ b/tools/sanity_checks.py
@@ -173,7 +173,7 @@ def test_releases_json(self):
             if name in {'sqlite', 'libjpeg'}:
                 continue
             self.assertIn(name, self.releases)
-            self.assertIn(version, self.releases[name]['versions'], f"for {name}")
+            self.assertIn(version, self.releases[name]['versions'], f'for {name}')
 
         # Verify keys are sorted
         self.assertEqual(sorted(self.releases.keys()), list(self.releases.keys()))
@@ -388,7 +388,7 @@ def check_new_release(self, name: str, builddir: str = '_build', deps=None, prog
         meson_env = os.environ.copy()
         def install_packages(kind, cmd, packages):
             if is_ci():
-                with ci_group('install {} packages'.format(kind)):
+                with ci_group(f'install {kind} packages'):
                     subprocess.check_call(cmd + packages)
             else:
                 s = ', '.join(packages)
@@ -436,7 +436,7 @@ def install_packages(kind, cmd, packages):
                 if 'unsupported' in error or 'not supported' in error or 'does not support' in error:
                     print('unsupported, as expected')
                     return
-                elif any('ERROR: '+x in error for x in {'Dependency', 'Program', 'Pkg-config binary', 'CMake binary'}):
+                elif any(f'ERROR: {x}' in error for x in {'Dependency', 'Program', 'Pkg-config binary', 'CMake binary'}):
                     if 'not found' in error:
                         print('cannot verify in wrapdb due to missing dependency')
                         return
@@ -480,10 +480,10 @@ def check_files(self, subproject: str, patch_path: Path) -> None:
                 tabs.append(f)
         if tabs:
             tabs_str = ', '.join([str(f) for f in tabs])
-            self.fail('Tabs in meson files are not allowed: ' + tabs_str)
+            self.fail(f'Tabs in meson files are not allowed: {tabs_str}')
         if not_permitted:
            not_permitted_str = ', '.join([str(f) for f in not_permitted])
-            self.fail('Not permitted files found: ' + not_permitted_str)
+            self.fail(f'Not permitted files found: {not_permitted_str}')
 
 
 if __name__ == '__main__':
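
For reference, the kind of rewrite flynt applies in the hunks above can be reproduced with a minimal, self-contained sketch (the values below are made up for illustration and are not taken from the repository):

    # Hypothetical example values, only to make the snippet runnable.
    name, kind = 'zlib', 'brew'

    # Before: string concatenation and str.format(), as on the removed lines.
    filename = 'subprojects/' + name + '.wrap'
    message = 'install {} packages'.format(kind)
    assert (filename, message) == ('subprojects/zlib.wrap', 'install brew packages')

    # After: the equivalent f-strings, as on the added lines.
    filename = f'subprojects/{name}.wrap'
    message = f'install {kind} packages'
    assert (filename, message) == ('subprojects/zlib.wrap', 'install brew packages')
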
From 1e81a5af027a1c89d53a83a59d099a4c8d3394ba Mon Sep 17 00:00:00 2001
From: Rosen Penev
Date: Sat, 7 Dec 2024 15:49:32 -0800
Subject: [PATCH 2/2] tools: some pylint-suggested cleanups

Remove return-after-else patterns and some redundant branches.

Signed-off-by: Rosen Penev
---
 tools/create_release.py |  4 ++--
 tools/fake_tty.py       |  2 +-
 tools/import-wraps.py   |  4 ++--
 tools/sanity_checks.py  | 35 ++++++++++++++++-------------------
 4 files changed, 21 insertions(+), 24 deletions(-)

diff --git a/tools/create_release.py b/tools/create_release.py
index adc94dc4d..e3cef5e9c 100755
--- a/tools/create_release.py
+++ b/tools/create_release.py
@@ -125,7 +125,7 @@ def find_upload_url(self):
         response.raise_for_status()
         for r in response.json():
             if r['tag_name'] == self.tag:
-                self.upload_url = r['upload_url'].replace(u'{?name,label}','')
+                self.upload_url = r['upload_url'].replace('{?name,label}','')
                 print('Found release:', self.upload_url)
                 return
 
@@ -135,7 +135,7 @@ def find_upload_url(self):
         }
         response = requests.post(api, headers=headers, json=content)
         response.raise_for_status()
-        self.upload_url = response.json()['upload_url'].replace(u'{?name,label}','')
+        self.upload_url = response.json()['upload_url'].replace('{?name,label}','')
         print('Created release:', self.upload_url)
 
     def upload(self, path: Path, mimetype: str):
diff --git a/tools/fake_tty.py b/tools/fake_tty.py
index a4baf3ccc..571ba2828 100755
--- a/tools/fake_tty.py
+++ b/tools/fake_tty.py
@@ -3,4 +3,4 @@
 import os, pty, sys
 
 os.environ['TERM'] = 'xterm-256color'
-exit(os.waitstatus_to_exitcode(pty.spawn(sys.argv[1:])))
+sys.exit(os.waitstatus_to_exitcode(pty.spawn(sys.argv[1:])))
diff --git a/tools/import-wraps.py b/tools/import-wraps.py
index 3b426e33b..fa0e88eca 100755
--- a/tools/import-wraps.py
+++ b/tools/import-wraps.py
@@ -40,7 +40,7 @@ def get_wrap_info(wrap: str) -> T.List[T.Tuple[str, str]]:
             version, revision = line.split()
             versions.append((version, revision))
     except subprocess.CalledProcessError:
-            pass
+        pass
     return versions
 
 def rewrite_wrap(wrap: str):
@@ -86,7 +86,7 @@ def create_release(tag: str, token: str):
         # This release has already been uploaded by previous run of the script
         return None
     response.raise_for_status()
-    return response.json()['upload_url'].replace(u'{?name,label}','')
+    return response.json()['upload_url'].replace('{?name,label}','')
 
 def upload(upload_url: str, content: T.AnyStr, mimetype: str, name: str, token: str):
     headers = {
diff --git a/tools/sanity_checks.py b/tools/sanity_checks.py
index ce86f4439..c0d4c692c 100755
--- a/tools/sanity_checks.py
+++ b/tools/sanity_checks.py
@@ -194,7 +194,7 @@ def check_meson_version(self, name: str, version: str, patch_path: str, builddir
         json_file = Path(builddir) / "meson-info/intro-projectinfo.json"
         # don't check if the build was skipped
         if json_file.exists():
-            with open(json_file) as project_info_file:
+            with open(json_file, encoding='utf-8') as project_info_file:
                 project_info = json.load(project_info_file)
                 subproject, = [subproj for subproj in project_info["subprojects"] if subproj["name"] == name]
                 if subproject['version'] != 'undefined' and patch_path:
@@ -329,27 +329,24 @@ def check_has_no_path_separators(self, value: str) -> None:
 
     def check_source_url(self, name: str, wrap_section: configparser.SectionProxy, version: str):
         if name == 'sqlite3':
             segs = version.split('.')
-            assert(len(segs) == 3)
+            assert len(segs) == 3
             version = segs[0] + segs[1] + '0' + segs[2]
         elif name == 're2':
             version = f'{version[:4]}-{version[4:6]}-{version[6:8]}'
-        elif name == 'netstring-c':
-            # There is no specific version for netstring-c
-            return True
-        elif name == 'directxmath':
-            # DirectXMath source url contains only tag name without version
-            return True
-        elif name == 'luajit':
-            # LuaJIT source URL does not contain the version number.
-            return True
         elif name == 'x-plane-sdk':
             segs = version.split('.')
             self.assertEqual(len(segs), 3)
             version = segs[0] + segs[1] + segs[2]
+        elif name in {'netstring-c', 'directxmath', 'luajit'}:
+            # There is no specific version for netstring-c
+            # DirectXMath source url contains only tag name without version
+            # LuaJIT source URL does not contain the version number.
+            return True
         source_url = wrap_section['source_url']
         version_ = version.replace('.', '_')
         self.assertTrue(version in source_url or version_ in source_url, f'Version {version} not found in {source_url}')
+        return True
 
     def check_new_release(self, name: str, builddir: str = '_build', deps=None, progs=None):
         print() # Ensure output starts from an empty line (we're running under unittest).
@@ -426,7 +423,7 @@ def install_packages(kind, cmd, packages):
         if res.returncode == 0:
             if not expect_working:
                 raise Exception(f'Wrap {name} successfully configured but was expected to fail')
-        if res.returncode != 0:
+        else:
             if expect_working:
                 res.check_returncode()
             else:
@@ -436,16 +433,16 @@ def install_packages(kind, cmd, packages):
                 if 'unsupported' in error or 'not supported' in error or 'does not support' in error:
                     print('unsupported, as expected')
                     return
-                elif any(f'ERROR: {x}' in error for x in {'Dependency', 'Program', 'Pkg-config binary', 'CMake binary'}):
-                    if 'not found' in error:
-                        print('cannot verify in wrapdb due to missing dependency')
-                        return
-                elif 'ERROR: Could not execute Vala compiler: valac' in error:
+                if 'ERROR: Could not execute Vala compiler: valac' in error:
                     print('cannot verify in wrapdb due to missing dependency')
                     return
-                elif 'ERROR: failed to unpack archive with error: ' in error:
+                if 'ERROR: failed to unpack archive with error: ' in error:
                     print('cannot verify in wrapdb because the archive cannot be unpacked')
                     return
+                if any(f'ERROR: {x}' in error for x in ['Dependency', 'Program', 'Pkg-config binary', 'CMake binary']):
+                    if 'not found' in error:
+                        print('cannot verify in wrapdb due to missing dependency')
+                        return
                 raise Exception(f'Wrap {name} failed to configure due to bugs in the wrap, rather than due to being unsupported')
         subprocess.check_call(['meson', 'compile', '-C', builddir], env=meson_env)
         if not ci.get('skip_tests', False):
@@ -474,7 +471,7 @@ def check_files(self, subproject: str, patch_path: Path) -> None:
         for f in patch_path.rglob('*'):
             if f.is_dir():
                 continue
-            elif not self.is_permitted_file(subproject, f.name):
+            if not self.is_permitted_file(subproject, f.name):
                 not_permitted.append(f)
             elif f.name in NO_TABS_FILES and '\t' in f.read_text(encoding='utf-8'):
                 tabs.append(f)
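
For reference, the shape of the returncode branch cleanup in check_new_release can be sketched in isolation (a minimal sketch with a made-up value, not code from the repository):

    returncode = 1  # hypothetical example value

    # Before: the second test re-checks what the first comparison already decided.
    if returncode == 0:
        outcome = 'configured'
    if returncode != 0:
        outcome = 'failed'

    # After: a plain else covers the remaining case, as in the sanity_checks.py hunk.
    if returncode == 0:
        outcome = 'configured'
    else:
        outcome = 'failed'

    assert outcome == 'failed'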