diff --git a/CHANGELOG.md b/CHANGELOG.md index 50e8ddd85..5cf49c340 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,5 @@ @@ -9,6 +9,8 @@ CHANGELOG 3.1.0 (unreleased) ------------------ +- Upgraded syntax to Python 3.6 (mostly f-strings) using pyupgrade (PR#2136 by Sebastian Wagner). + ### Configuration ### Core diff --git a/contrib/check_mk/cronjob_intelmq_statistics.py b/contrib/check_mk/cronjob_intelmq_statistics.py index 9b2a156b2..11b636f84 100755 --- a/contrib/check_mk/cronjob_intelmq_statistics.py +++ b/contrib/check_mk/cronjob_intelmq_statistics.py @@ -31,6 +31,6 @@ value = '0' else: value = value.decode() - stats.append("%s=%s" % (key.decode(), value)) + stats.append(f"{key.decode()}={value}") handle.write("|".join(stats)) handle.write('\n') diff --git a/contrib/eventdb/apply_domain_suffix.py b/contrib/eventdb/apply_domain_suffix.py index 528fd0501..48e834a44 100644 --- a/contrib/eventdb/apply_domain_suffix.py +++ b/contrib/eventdb/apply_domain_suffix.py @@ -20,7 +20,7 @@ def eventdb_apply(host, port, table, dry_run, where, filename): if password: - password = input('Password for user %r on %r: ' % (username, host)) + password = input(f'Password for user {username!r} on {host!r}: ') where = 'AND ' + where if where else '' con1 = psycopg2.connect(user=username, diff --git a/contrib/eventdb/apply_mapping_eventdb.py b/contrib/eventdb/apply_mapping_eventdb.py index d46c892b7..9df242183 100644 --- a/contrib/eventdb/apply_mapping_eventdb.py +++ b/contrib/eventdb/apply_mapping_eventdb.py @@ -50,7 +50,7 @@ def eventdb_apply(malware_name_column, malware_family_column, host, port, print("Error: Python module 'psycopg2' is needed but not available.", file=sys.stderr) return 2 if password: - password = input('Password for user %r on %r: ' % (username, host)) + password = input(f'Password for user {username!r} on {host!r}: ') where = 'AND ' + where if where else '' db = psycopg2.connect(database=database, user=username, password=password, diff --git a/contrib/malware_name_mapping/download_mapping.py b/contrib/malware_name_mapping/download_mapping.py index 5ee39dae4..8c9343519 100755 --- a/contrib/malware_name_mapping/download_mapping.py +++ b/contrib/malware_name_mapping/download_mapping.py @@ -38,7 +38,7 @@ def none_len(arg: Optional[list]): def generate_rule(expression: str, identifier: str, name=None): - return {"rulename": name if name else "%s-%s" % (identifier, + return {"rulename": name if name else "{}-{}".format(identifier, hashlib.sha1(expression.encode()).hexdigest()[:10]), "if": {"classification.taxonomy": "malicious-code", "malware.name": expression @@ -112,7 +112,7 @@ def download(url: str = URL, add_default=False, params=None, include_malpedia=Fa rules.append(generate_rule(".*", add_default, name="default")) if params: - rules.extend((generate_rule(rule[0][0], rule[1][0]) for rule in params)) + rules.extend(generate_rule(rule[0][0], rule[1][0]) for rule in params) return json.dumps(rules, indent=4, separators=(',', ': ')) diff --git a/docs/autogen.py b/docs/autogen.py index 2773105cf..b38c49a9b 100644 --- a/docs/autogen.py +++ b/docs/autogen.py @@ -71,7 +71,7 @@ def harm_docs(): def info(key, value=""): - return ("* **%s:** %s\n" % (key.title(), value)).strip() + '\n' + return (f"* **{key.title()}:** {value}\n").strip() + '\n' def feeds_docs(): @@ -134,7 +134,7 @@ def feeds_docs(): if isinstance(value, (list, tuple)) and value: value = json.dumps(value) - output += " * `%s`: `%s`\n" % (key, value) + output += f" * `{key}`: `{value}`\n" output += '\n' diff --git 
a/intelmq/bin/intelmq_generate_misp_objects_templates.py b/intelmq/bin/intelmq_generate_misp_objects_templates.py index a960aa0ef..fe7505530 100755 --- a/intelmq/bin/intelmq_generate_misp_objects_templates.py +++ b/intelmq/bin/intelmq_generate_misp_objects_templates.py @@ -119,11 +119,11 @@ def dump_templates(self): objects = Path(args.objects) if not objects.exists(): - raise Exception('Path to misp-objects repository does not exists: {args.objects}'.format(args=args)) + raise Exception(f'Path to misp-objects repository does not exists: {args.objects}') harmonization_file = Path(args.harmonization) if not harmonization_file.exists(): - raise Exception('Path to harmonization configuration does not exists: {args.harmonization}'.format(args=args)) + raise Exception(f'Path to harmonization configuration does not exists: {args.harmonization}') g = MISPObjectTemplateGenerator(objects, harmonization_file) g.generate_templates() diff --git a/intelmq/bin/intelmq_psql_initdb.py b/intelmq/bin/intelmq_psql_initdb.py index b2b1894b0..1ad3278f2 100644 --- a/intelmq/bin/intelmq_psql_initdb.py +++ b/intelmq/bin/intelmq_psql_initdb.py @@ -28,9 +28,9 @@ def generate(harmonization_file=HARMONIZATION_CONF_FILE): try: print("INFO - Reading %s file" % harmonization_file) - with open(harmonization_file, 'r') as fp: + with open(harmonization_file) as fp: DATA = json.load(fp)['event'] - except IOError: + except OSError: print("ERROR - Could not find %s" % harmonization_file) print("ERROR - Make sure that you have intelmq installed.") sys.exit(2) @@ -69,7 +69,7 @@ def generate(harmonization_file=HARMONIZATION_CONF_FILE): initdb = """CREATE TABLE events ( "id" BIGSERIAL UNIQUE PRIMARY KEY,""" for field, field_type in sorted(FIELDS.items()): - initdb += '\n "{name}" {type},'.format(name=field, type=field_type) + initdb += f'\n "{field}" {field_type},' initdb = initdb[:-1] # remove last ',' initdb += "\n);\n" @@ -84,7 +84,7 @@ def main(): fp = None try: if os.path.exists(OUTPUTFILE): - print('INFO - File {} exists, generating temporary file.'.format(OUTPUTFILE)) + print(f'INFO - File {OUTPUTFILE} exists, generating temporary file.') os_fp, OUTPUTFILE = tempfile.mkstemp(suffix='.initdb.sql', text=True) fp = os.fdopen(os_fp, 'wt') diff --git a/intelmq/bin/intelmqctl.py b/intelmq/bin/intelmqctl.py index 9f25fc3d4..35de1a25e 100644 --- a/intelmq/bin/intelmqctl.py +++ b/intelmq/bin/intelmqctl.py @@ -39,7 +39,7 @@ psutil = None -class Parameters(object): +class Parameters: pass @@ -206,7 +206,7 @@ def __init__(self, interactive: bool = False, returntype: ReturnType = ReturnTyp try: self._runtime_configuration = utils.load_configuration(RUNTIME_CONF_FILE) except ValueError as exc: # pragma: no cover - self.abort('Error loading %r: %s' % (RUNTIME_CONF_FILE, exc)) + self.abort(f'Error loading {RUNTIME_CONF_FILE!r}: {exc}') self._processmanagertype = getattr(self._parameters, 'process_manager', 'intelmq') if self._processmanagertype not in process_managers(): @@ -828,13 +828,13 @@ def check(self, no_connections=False): try: with open(HARMONIZATION_CONF_FILE) as file_handle: files[HARMONIZATION_CONF_FILE] = json.load(file_handle) - except (IOError, ValueError) as exc: # pragma: no cover + except (OSError, ValueError) as exc: # pragma: no cover check_logger.error('Could not load %r: %s.', HARMONIZATION_CONF_FILE, exc) retval = 1 try: with open(RUNTIME_CONF_FILE) as file_handle: files[RUNTIME_CONF_FILE] = yaml.load(file_handle) - except (IOError, ValueError) as exc: + except (OSError, ValueError) as exc: check_logger.error('Could 
not load %r: %s.', RUNTIME_CONF_FILE, exc) retval = 1 if retval: @@ -933,7 +933,7 @@ def check(self, no_connections=False): bot_check = bot.check(bot_parameters) if bot_check: for log_line in bot_check: - getattr(check_logger, log_line[0])("Bot %r: %s" % (bot_id, log_line[1])) + getattr(check_logger, log_line[0])(f"Bot {bot_id!r}: {log_line[1]}") for group in utils.list_all_bots().values(): for bot_id, bot in group.items(): if subprocess.call(['which', bot['module']], stdout=subprocess.DEVNULL, @@ -1026,7 +1026,7 @@ def upgrade_conf(self, previous=None, dry_run=None, function=None, utils.write_configuration(state_file, state, new=True, useyaml=False) except Exception as exc: self._logger.error('Error writing state file %r: %s.', state_file, exc) - return 1, 'Error writing state file %r: %s.' % (state_file, exc) + return 1, f'Error writing state file {state_file!r}: {exc}.' self._logger.info('Successfully wrote initial state file.') runtime = utils.load_configuration(RUNTIME_CONF_FILE) @@ -1241,7 +1241,7 @@ def debug(self, sections=None): 'CONFIG_DIR', 'ROOT_DIR'): output['paths'][path] = variables[path] if self._returntype is ReturnType.TEXT: - print('%s: %r' % (path, variables[path])) + print(f'{path}: {variables[path]!r}') if sections is None or 'environment_variables' in sections: output['environment_variables'] = {} if self._returntype is ReturnType.TEXT: @@ -1251,7 +1251,7 @@ def debug(self, sections=None): 'PATH'): output['environment_variables'][variable] = os.getenv(variable) if self._returntype is ReturnType.TEXT: - print('%s: %r' % (variable, os.getenv(variable))) + print(f'{variable}: {os.getenv(variable)!r}') return 0, output def log_bot_message(self, status, *args): diff --git a/intelmq/bin/intelmqdump.py b/intelmq/bin/intelmqdump.py index 9f1d5b87f..0f860d60c 100644 --- a/intelmq/bin/intelmqdump.py +++ b/intelmq/bin/intelmqdump.py @@ -92,24 +92,24 @@ def dump_info(fname, file_descriptor=None): else: try: if file_descriptor is None: - handle = open(fname, 'rt') + handle = open(fname) fcntl.flock(handle, fcntl.LOCK_EX | fcntl.LOCK_NB) else: handle = file_descriptor except BlockingIOError: info = red('Dump file is locked.') except OSError as exc: - info = red('unable to open file: {!s}'.format(exc)) + info = red(f'unable to open file: {exc!s}') else: try: content = json.load(handle) except ValueError as exc: - info = red('unable to load JSON: {!s}'.format(exc)) + info = red(f'unable to load JSON: {exc!s}') else: try: - info = "{!s} dumps".format(len(content.keys())) + info = f"{len(content.keys())!s} dumps" except AttributeError as exc: - info = red("unable to count dumps: {!s}".format(exc)) + info = red(f"unable to count dumps: {exc!s}") finally: try: if file_descriptor is None: @@ -221,7 +221,7 @@ def main(): filenames = [(fname, fname[len(DEFAULT_LOGGING_PATH):-5]) for fname in sorted(filenames)] - length = max([len(value[1]) for value in filenames]) + length = max(len(value[1]) for value in filenames) print(bold("{c:>3}: {s:{length}} {i}".format(c='id', s='name (bot id)', i='content', length=length))) @@ -249,7 +249,7 @@ def main(): fname = os.path.join(DEFAULT_LOGGING_PATH, botid) + '.dump' if not os.path.isfile(fname): - print(bold('Given file does not exist: {}'.format(fname))) + print(bold(f'Given file does not exist: {fname}')) exit(1) answer = None @@ -264,7 +264,7 @@ def main(): info = dump_info(fname, file_descriptor=handle) handle.seek(0) available_answers = ACTIONS.keys() - print('Processing {}: {}'.format(bold(botid), info)) + print(f'Processing {bold(botid)}: 
{info}') if info.startswith(str(red)): available_opts = [item[0] for item in ACTIONS.values() if item[2]] @@ -351,7 +351,7 @@ def main(): print('Event converted to Report automatically.') msg = message.Report(message.MessageFactory.unserialize(msg)).serialize() else: - print(red("The given queue '{}' is not configured. Please retry with a valid queue.".format(queue_name))) + print(red(f"The given queue '{queue_name}' is not configured. Please retry with a valid queue.")) break try: pipe.set_queues(queue_name, 'destination') @@ -362,12 +362,12 @@ def main(): ''.format(queue_name, traceback.format_exc()))) else: del content[key] - print(green('Recovered dump {}.'.format(i))) + print(green(f'Recovered dump {i}.')) finally: save_file(handle, content) if not content: delete_file = True - print('Deleting empty file {}'.format(fname)) + print(f'Deleting empty file {fname}') break elif answer[0] == 'd': # Delete entries or file @@ -379,7 +379,7 @@ def main(): else: # delete dumpfile delete_file = True - print('Deleting file {}'.format(fname)) + print(f'Deleting file {fname}') break elif answer[0] == 's': # Show entries by id @@ -387,7 +387,7 @@ def main(): value = copy.copy(orig_value) # otherwise the raw field gets truncated if count not in ids: continue - print('=' * 100, '\nShowing id {} {}\n'.format(count, key), + print('=' * 100, f'\nShowing id {count} {key}\n', '-' * 50) if value.get('message_type') == 'base64': if args.truncate and len(value['message']) > args.truncate: diff --git a/intelmq/bin/intelmqsetup.py b/intelmq/bin/intelmqsetup.py index e885041aa..2dc1b9b14 100755 --- a/intelmq/bin/intelmqsetup.py +++ b/intelmq/bin/intelmqsetup.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- """ © 2019-2021 nic.at GmbH diff --git a/intelmq/bin/rewrite_config_files.py b/intelmq/bin/rewrite_config_files.py index 9f5e7c640..9d23572a3 100755 --- a/intelmq/bin/rewrite_config_files.py +++ b/intelmq/bin/rewrite_config_files.py @@ -43,6 +43,6 @@ def rewrite(fobj): with open(fn, 'r+') as f: rewrite(f) - except IOError: + except OSError: traceback.print_exc() print('Could not open files. Wrong directory? 
Also see the --help.') diff --git a/intelmq/bots/collectors/file/collector_file.py b/intelmq/bots/collectors/file/collector_file.py index 597bb9b58..e76a0b497 100644 --- a/intelmq/bots/collectors/file/collector_file.py +++ b/intelmq/bots/collectors/file/collector_file.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # SPDX-FileCopyrightText: 2016 by Bundesamt für Sicherheit in der Informationstechnik # # SPDX-License-Identifier: AGPL-3.0-or-later diff --git a/intelmq/bots/collectors/github_api/_collector_github_api.py b/intelmq/bots/collectors/github_api/_collector_github_api.py index 5f3eb6ada..545670d79 100644 --- a/intelmq/bots/collectors/github_api/_collector_github_api.py +++ b/intelmq/bots/collectors/github_api/_collector_github_api.py @@ -47,18 +47,18 @@ def process_request(self): def github_api(self, api_path: str, **kwargs) -> dict: try: - response = requests.get("{}".format(api_path), params=kwargs, headers=self.__user_headers) + response = requests.get(f"{api_path}", params=kwargs, headers=self.__user_headers) if response.status_code == 401: # bad credentials raise ValueError(response.json()['message']) else: return response.json() except requests.RequestException: - raise ValueError("Unknown repository {!r}.".format(api_path)) + raise ValueError(f"Unknown repository {api_path!r}.") @staticmethod def __produce_auth_header(username: str, password: str) -> dict: - encoded_auth_bytes = base64.b64encode(bytes('{}:{}'.format(username, password), encoding='utf-8')) + encoded_auth_bytes = base64.b64encode(bytes(f'{username}:{password}', encoding='utf-8')) return { 'Authorization': 'Basic {}'.format(encoded_auth_bytes.decode('utf-8')) } diff --git a/intelmq/bots/collectors/github_api/collector_github_contents_api.py b/intelmq/bots/collectors/github_api/collector_github_contents_api.py index bc354cd05..f98efd34b 100644 --- a/intelmq/bots/collectors/github_api/collector_github_contents_api.py +++ b/intelmq/bots/collectors/github_api/collector_github_contents_api.py @@ -33,7 +33,7 @@ class GithubContentsAPICollectorBot(GithubAPICollectorBot): def init(self): super().init() if self.repository is not None: - self.__base_api_url = 'https://api.github.com/repos/{}/contents'.format(self.repository) + self.__base_api_url = f'https://api.github.com/repos/{self.repository}/contents' else: raise InvalidArgument('repository', expected='string') @@ -82,7 +82,7 @@ def __recurse_repository_files(self, base_api_url: str, extracted_github_files: if field_name in github_file: extracted_github_file_data['extra'][field_name] = github_file[field_name] else: - self.logger.warning("Field '{}' does not exist in the Github file data.".format(field_name)) + self.logger.warning(f"Field '{field_name}' does not exist in the Github file data.") extracted_github_files.append(extracted_github_file_data) return extracted_github_files diff --git a/intelmq/bots/collectors/http/collector_http.py b/intelmq/bots/collectors/http/collector_http.py index d32511ab3..f7d7d9e18 100644 --- a/intelmq/bots/collectors/http/collector_http.py +++ b/intelmq/bots/collectors/http/collector_http.py @@ -40,7 +40,7 @@ gnupg = None -class Time(object): +class Time: def __init__(self, delta=None): """ Delta is a datetime.timedelta JSON string, ex: '{"days"=-1}'. 
""" self.time = datetime.now() @@ -105,7 +105,7 @@ def process(self): return if result.trust_level < 1: - self.logger.debug("Trust level not defined for key {}.".format(result.key_id)) + self.logger.debug(f"Trust level not defined for key {result.key_id}.") elif result.trust_level < 3: self.logger.debug("Low trust level for key {0.key_id}: {0.trust_level}.".format(result)) @@ -165,11 +165,11 @@ def verify_signature(self, data: bytes): http_url = self.signature_url # download signature file - self.logger.info("Downloading PGP signature from {}.".format(http_url)) + self.logger.info(f"Downloading PGP signature from {http_url}.") resp = self.http_get(http_url) if resp.status_code // 100 != 2: - raise ValueError("Could not download PGP signature for report: {}.".format(resp.status_code)) + raise ValueError(f"Could not download PGP signature for report: {resp.status_code}.") self.logger.info("PGP signature downloaded.") diff --git a/intelmq/bots/collectors/kafka/collector.py b/intelmq/bots/collectors/kafka/collector.py index bbc33d1c7..ab9088d4b 100644 --- a/intelmq/bots/collectors/kafka/collector.py +++ b/intelmq/bots/collectors/kafka/collector.py @@ -33,8 +33,8 @@ def init(self): if kafka is None: raise MissingDependencyError("kafka") - self.logger.debug("Topic set to {}, bootstrap_servers set to {}".format(self.topic, self.bootstrap_servers)) - self.logger.debug("ssl_cafile set to {}, ssl_certfile set to {}, ssl_check_hostname set to {}".format(self.ssl_cafile, self.ssl_certfile, self.ssl_check_hostname)) + self.logger.debug(f"Topic set to {self.topic}, bootstrap_servers set to {self.bootstrap_servers}") + self.logger.debug(f"ssl_cafile set to {self.ssl_cafile}, ssl_certfile set to {self.ssl_certfile}, ssl_check_hostname set to {self.ssl_check_hostname}") def process(self): " Fetch messages from the Kafka server and pass them on one by one " @@ -42,7 +42,7 @@ def process(self): for msg in consumer: consumer.commit() report = self.new_report() - self.logger.debug("Received msg with offset {} from kafka topic {}: {}".format(msg.offset, msg.topic, msg.value.decode())) + self.logger.debug(f"Received msg with offset {msg.offset} from kafka topic {msg.topic}: {msg.value.decode()}") if report.add('raw', msg.value.decode()): self.send_message(report) diff --git a/intelmq/bots/collectors/rsync/collector_rsync.py b/intelmq/bots/collectors/rsync/collector_rsync.py index ab1b86e06..2432dd6f6 100644 --- a/intelmq/bots/collectors/rsync/collector_rsync.py +++ b/intelmq/bots/collectors/rsync/collector_rsync.py @@ -24,7 +24,7 @@ def init(self): pass def process(self): - self.logger.info("Updating file {}.".format(self.file)) + self.logger.info(f"Updating file {self.file}.") process = run(["rsync", path.join(self.rsync_path, self.file), self.temp_directory], stderr=PIPE) @@ -34,7 +34,7 @@ def process(self): process.returncode, process.stderr)) report = self.new_report() - with open(path.join(self.temp_directory, self.file), "r") as rsync_file: + with open(path.join(self.temp_directory, self.file)) as rsync_file: report.add("raw", rsync_file.read()) self.send_message(report) diff --git a/intelmq/bots/collectors/shadowserver/collector_reports_api.py b/intelmq/bots/collectors/shadowserver/collector_reports_api.py index f35b7c859..09b0d0fde 100644 --- a/intelmq/bots/collectors/shadowserver/collector_reports_api.py +++ b/intelmq/bots/collectors/shadowserver/collector_reports_api.py @@ -54,7 +54,7 @@ def init(self): if isinstance(self.types, str): self.types = self.types.split(',') - self.preamble = '{{ "apikey": 
"{}" '.format(self.api_key) + self.preamble = f'{{ "apikey": "{self.api_key}" ' def _headers(self, data): return {'HMAC2': hmac.new(self.secret.encode(), data.encode('utf-8'), digestmod=hashlib.sha256).hexdigest()} @@ -76,8 +76,8 @@ def _reports_list(self, date=None): dayafter = date + timedelta(1) data = self.preamble - data += ',"report": ["{}"] '.format(self.country) - data += ',"date": "{}:{}" '.format(daybefore.isoformat(), dayafter.isoformat()) + data += f',"report": ["{self.country}"] ' + data += f',"date": "{daybefore.isoformat()}:{dayafter.isoformat()}" ' data += '}' self.logger.debug('Downloading report list with data: %s.', data) @@ -101,7 +101,7 @@ def _report_download(self, reportid: str): Download one report from the shadowserver API via the reports/download endpoint """ data = self.preamble - data += ',"id": "{}"}}'.format(reportid) + data += f',"id": "{reportid}"}}' self.logger.debug('Downloading report with data: %s.', data) response = self.http_session().post(APIROOT + 'reports/download', data=data, headers=self._headers(data)) diff --git a/intelmq/bots/collectors/tcp/collector.py b/intelmq/bots/collectors/tcp/collector.py index 014ad470f..ce524a96f 100644 --- a/intelmq/bots/collectors/tcp/collector.py +++ b/intelmq/bots/collectors/tcp/collector.py @@ -54,7 +54,7 @@ def process(self): if msg: # if the partner connection ended, our message are already sent conn.sendall(b"Ok") pass - except socket.error: + except OSError: self.logger.exception("Socket error.") finally: if conn: diff --git a/intelmq/bots/collectors/twitter/collector_twitter.py b/intelmq/bots/collectors/twitter/collector_twitter.py index febb42c2f..f8e3fba99 100644 --- a/intelmq/bots/collectors/twitter/collector_twitter.py +++ b/intelmq/bots/collectors/twitter/collector_twitter.py @@ -114,7 +114,7 @@ def process(self): report.add('raw', tweet.full_text) report.add( 'feed.url', - 'https://twitter.com/{}/status/{}'.format(tweet.user.screen_name, tweet.id)) + f'https://twitter.com/{tweet.user.screen_name}/status/{tweet.id}') self.send_message(report) if tweet.user.screen_name in self._follow_urls: if len(tweet.urls) > 0: @@ -127,7 +127,7 @@ def process(self): report.add('feed.code', 'url_text') report.add( 'feed.url', - 'https://twitter.com/{}/status/{}'.format(tweet.user.screen_name, tweet.id)) + f'https://twitter.com/{tweet.user.screen_name}/status/{tweet.id}') self.send_message(report) diff --git a/intelmq/bots/experts/aggregate/expert.py b/intelmq/bots/experts/aggregate/expert.py index d8982588f..5b885fe0a 100644 --- a/intelmq/bots/experts/aggregate/expert.py +++ b/intelmq/bots/experts/aggregate/expert.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- """ Aggregate Expert diff --git a/intelmq/bots/experts/asn_lookup/expert.py b/intelmq/bots/experts/asn_lookup/expert.py index ce7adadf2..b1fa7caa6 100644 --- a/intelmq/bots/experts/asn_lookup/expert.py +++ b/intelmq/bots/experts/asn_lookup/expert.py @@ -33,7 +33,7 @@ def init(self): try: self._database = pyasn.pyasn(self.database) - except IOError: + except OSError: self.logger.error("pyasn data file does not exist or could not be " "accessed in %r.", self.database) self.logger.error("Read 'bots/experts/asn_lookup/README' and " @@ -100,11 +100,11 @@ def update_database(cls, verbose=False): bots[bot] = runtime_conf[bot]["parameters"]["database"] except KeyError as e: - sys.exit("Database update failed. Your configuration of {0} is missing key {1}.".format(bot, e)) + sys.exit(f"Database update failed. 
Your configuration of {bot} is missing key {e}.") if not bots: if verbose: - print("Database update skipped. No bots of type {0} present in runtime.conf.".format(__name__)) + print(f"Database update skipped. No bots of type {__name__} present in runtime.conf.") sys.exit(0) # we only need to import now. If there are no asn_lookup bots, this dependency does not need to be installed @@ -145,11 +145,11 @@ def update_database(cls, verbose=False): response = session.get(url) if response.status_code != 200: - sys.exit("Database update failed. Server responded: {0}.\n" - "URL: {1}".format(response.status_code, response.url)) + sys.exit("Database update failed. Server responded: {}.\n" + "URL: {}".format(response.status_code, response.url)) except requests.exceptions.RequestException as e: - sys.exit("Database update failed. Connection Error: {0}".format(e)) + sys.exit(f"Database update failed. Connection Error: {e}") with bz2.open(io.BytesIO(response.content)) as archive: if verbose: diff --git a/intelmq/bots/experts/domain_suffix/_lib.py b/intelmq/bots/experts/domain_suffix/_lib.py index 7b520c253..a39021ec3 100644 --- a/intelmq/bots/experts/domain_suffix/_lib.py +++ b/intelmq/bots/experts/domain_suffix/_lib.py @@ -10,7 +10,7 @@ """ -class PublicSuffixList(object): +class PublicSuffixList: def __init__(self, source, only_icann=None): self.suffixes = {} icann_section = False diff --git a/intelmq/bots/experts/domain_suffix/expert.py b/intelmq/bots/experts/domain_suffix/expert.py index b06854f99..1961dc747 100644 --- a/intelmq/bots/experts/domain_suffix/expert.py +++ b/intelmq/bots/experts/domain_suffix/expert.py @@ -88,11 +88,11 @@ def update_database(cls, verbose=False): bots[bot] = runtime_conf[bot]["parameters"]["suffix_file"] except KeyError as e: - sys.exit("Database update failed. Your configuration of {0} is missing key {1}.".format(bot, e)) + sys.exit(f"Database update failed. Your configuration of {bot} is missing key {e}.") if not bots: if verbose: - print("Database update skipped. No bots of type {0} present in runtime.conf.".format(__name__)) + print(f"Database update skipped. No bots of type {__name__} present in runtime.conf.") sys.exit(0) # we only need to import now. If there are no asn_lookup bots, this dependency does not need to be installed @@ -105,11 +105,11 @@ def update_database(cls, verbose=False): response = session.get(url) if not response.ok: - sys.exit("Database update failed. Server responded: {0}.\n" - "URL: {1}".format(response.status_code, response.url)) + sys.exit("Database update failed. Server responded: {}.\n" + "URL: {}".format(response.status_code, response.url)) except requests.exceptions.RequestException as e: - sys.exit("Database update failed. Connection Error: {0}".format(e)) + sys.exit(f"Database update failed. Connection Error: {e}") for database_path in set(bots.values()): database_dir = pathlib.Path(database_path).parent diff --git a/intelmq/bots/experts/domain_valid/expert.py b/intelmq/bots/experts/domain_valid/expert.py index 249decac6..642ecfe3b 100644 --- a/intelmq/bots/experts/domain_valid/expert.py +++ b/intelmq/bots/experts/domain_valid/expert.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- """ Domain validator @@ -81,11 +80,11 @@ def update_database(cls, verbose=False): bots[bot] = runtime_conf[bot]["parameters"]["tlds_domains_list"] except KeyError as e: - sys.exit("Database update failed. Your configuration of {0} is missing key {1}.".format(bot, e)) + sys.exit(f"Database update failed. 
Your configuration of {bot} is missing key {e}.") if not bots: if verbose: - print("Database update skipped. No bots of type {0} present in runtime.conf.".format(__name__)) + print(f"Database update skipped. No bots of type {__name__} present in runtime.conf.") sys.exit(0) try: @@ -96,11 +95,11 @@ def update_database(cls, verbose=False): response = session.get(url) if not response.ok: - sys.exit("Database update failed. Server responded: {0}.\n" - "URL: {1}".format(response.status_code, response.url)) + sys.exit("Database update failed. Server responded: {}.\n" + "URL: {}".format(response.status_code, response.url)) except requests.exceptions.RequestException as e: - sys.exit("Database update failed. Connection Error: {0}".format(e)) + sys.exit(f"Database update failed. Connection Error: {e}") for database_path in set(bots.values()): database_dir = pathlib.Path(database_path).parent diff --git a/intelmq/bots/experts/idea/expert.py b/intelmq/bots/experts/idea/expert.py index 1fbee7edb..f9b3724ac 100644 --- a/intelmq/bots/experts/idea/expert.py +++ b/intelmq/bots/experts/idea/expert.py @@ -106,7 +106,7 @@ def init(self): # extra - too informal, will consider based on real world data "Format": lambda s: "IDEA0", - "Description": lambda s: "%s: %s" % ( + "Description": lambda s: "{}: {}".format( s["feed.name"], s.get("event_description.text", s.get("comment", diff --git a/intelmq/bots/experts/maxmind_geoip/expert.py b/intelmq/bots/experts/maxmind_geoip/expert.py index 680bb4b01..50e9e8ae3 100644 --- a/intelmq/bots/experts/maxmind_geoip/expert.py +++ b/intelmq/bots/experts/maxmind_geoip/expert.py @@ -38,7 +38,7 @@ def init(self): try: self.database = geoip2.database.Reader(self.database) - except IOError: + except OSError: self.logger.exception("GeoIP Database does not exist or could not " "be accessed in %r.", self.database) @@ -118,7 +118,7 @@ def update_database(cls, verbose=False): bots[bot] = runtime_conf[bot]["parameters"]["database"] except KeyError as e: - error = "Database update failed. Your configuration of {0} is missing key {1}.".format(bot, e) + error = f"Database update failed. Your configuration of {bot} is missing key {e}." if str(e) == "'license_key'": error += "\n" error += "Since December 30, 2019 you need to register for a free license key to access GeoLite2 database.\n" @@ -129,7 +129,7 @@ def update_database(cls, verbose=False): if not bots: if verbose: - print("Database update skipped. No bots of type {0} present in runtime.conf.".format(__name__)) + print(f"Database update skipped. No bots of type {__name__} present in runtime.conf.") sys.exit(0) # we only need to import now, if there are no maxmind_geoip bots, this dependency does not need to be installed @@ -151,14 +151,14 @@ def update_database(cls, verbose=False): "suffix": "tar.gz" }) except requests.exceptions.RequestException as e: - sys.exit("Database update failed. Connection Error: {0}".format(e)) + sys.exit(f"Database update failed. Connection Error: {e}") if response.status_code == 401: sys.exit("Database update failed. Your license key is invalid.") if response.status_code != 200: - sys.exit("Database update failed. Server responded: {0}.\n" - "URL: {1}".format(response.status_code, response.url)) + sys.exit("Database update failed. 
Server responded: {}.\n" + "URL: {}".format(response.status_code, response.url)) database_data = None diff --git a/intelmq/bots/experts/national_cert_contact_certat/expert.py b/intelmq/bots/experts/national_cert_contact_certat/expert.py index 56bd1a29a..b7a7035e2 100644 --- a/intelmq/bots/experts/national_cert_contact_certat/expert.py +++ b/intelmq/bots/experts/national_cert_contact_certat/expert.py @@ -66,7 +66,7 @@ def process(self): continue response = req.text.strip().split(';') - ccfield = '{}.geolocation.cc'.format(section) + ccfield = f'{section}.geolocation.cc' if self.overwrite_cc or ccfield not in event: event.add(ccfield, response[1]) diff --git a/intelmq/bots/experts/rdap/expert.py b/intelmq/bots/experts/rdap/expert.py index 6aad0352c..a786b87f8 100644 --- a/intelmq/bots/experts/rdap/expert.py +++ b/intelmq/bots/experts/rdap/expert.py @@ -85,7 +85,7 @@ def process(self): domain_parts.pop(0) url_without_domain_suffix = url.replace(".%s" % (domain_suffix), "") - url = "%s.%s" % (url_without_domain_suffix.split(".")[-1], domain_suffix) + url = "{}.{}".format(url_without_domain_suffix.split(".")[-1], domain_suffix) if domain_suffix in self.__rdap_directory: service = self.__rdap_directory[domain_suffix] @@ -93,9 +93,9 @@ def process(self): if service['auth']['type'] == 'jwt': self.__session.headers['Authorization'] = "Bearer %s" % (service['auth']['token']) else: - raise NotImplementedError("Authentication type %r (configured for service %r) is not implemented" % (service['auth'], domain_suffix)) + raise NotImplementedError("Authentication type {!r} (configured for service {!r}) is not implemented".format(service['auth'], domain_suffix)) - resp = self.__session.get("{0}domain/{1}".format(service['url'], url)) + resp = self.__session.get("{}domain/{}".format(service['url'], url)) if resp.status_code < 200 or resp.status_code > 299: if resp.status_code == 404: diff --git a/intelmq/bots/experts/recordedfuture_iprisk/expert.py b/intelmq/bots/experts/recordedfuture_iprisk/expert.py index 0bdd6dc90..751130fae 100644 --- a/intelmq/bots/experts/recordedfuture_iprisk/expert.py +++ b/intelmq/bots/experts/recordedfuture_iprisk/expert.py @@ -36,7 +36,7 @@ def init(self): for row in rfreader: self._database[row['Name']] = int(row['Risk']) - except IOError: + except OSError: raise ValueError("Recorded future risklist not defined or failed on open.") def process(self): @@ -82,11 +82,11 @@ def update_database(cls, verbose=False): bots[bot] = runtime_conf[bot]["parameters"]["database"] except KeyError as e: - sys.exit("Database update failed. Your configuration of {0} is missing key {1}.".format(bot, e)) + sys.exit(f"Database update failed. Your configuration of {bot} is missing key {e}.") if not bots: if verbose: - print("Database update skipped. No bots of type {0} present in runtime.conf.".format(__name__)) + print(f"Database update skipped. No bots of type {__name__} present in runtime.conf.") sys.exit(0) try: @@ -104,14 +104,14 @@ def update_database(cls, verbose=False): }) except requests.exceptions.RequestException as e: - sys.exit("Database update failed. Connection Error: {0}".format(e)) + sys.exit(f"Database update failed. Connection Error: {e}") if response.status_code == 401: sys.exit("Database update failed. Your API token is invalid.") if response.status_code != 200: - sys.exit("Database update failed. Server responded: {0}.\n" - "URL: {1}".format(response.status_code, response.url)) + sys.exit("Database update failed. 
Server responded: {}.\n" + "URL: {}".format(response.status_code, response.url)) database_data = None diff --git a/intelmq/bots/experts/remove_affix/expert.py b/intelmq/bots/experts/remove_affix/expert.py index b2faf544d..a2b7ca63b 100644 --- a/intelmq/bots/experts/remove_affix/expert.py +++ b/intelmq/bots/experts/remove_affix/expert.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- """ Remove Affix diff --git a/intelmq/bots/experts/ripe/expert.py b/intelmq/bots/experts/ripe/expert.py index 008678814..b347dfe2e 100644 --- a/intelmq/bots/experts/ripe/expert.py +++ b/intelmq/bots/experts/ripe/expert.py @@ -127,7 +127,7 @@ def process(self): self.acknowledge_message() def __perform_cached_query(self, type, resource): - cached_value = self.cache_get('{}:{}'.format(type, resource)) + cached_value = self.cache_get(f'{type}:{resource}') if cached_value: if cached_value == CACHE_NO_VALUE: return {} @@ -142,7 +142,7 @@ def __perform_cached_query(self, type, resource): """ If no abuse contact could be found, a 404 is given. """ try: if response.json()['message'].startswith('No abuse contact found for '): - self.cache_set('{}:{}'.format(type, resource), CACHE_NO_VALUE) + self.cache_set(f'{type}:{resource}', CACHE_NO_VALUE) return {} except ValueError: pass @@ -159,11 +159,11 @@ def __perform_cached_query(self, type, resource): '' % (type, status)) data = self.REPLY_TO_DATA[type](response_data) - self.cache_set('{}:{}'.format(type, resource), + self.cache_set(f'{type}:{resource}', (json.dumps(list(data) if isinstance(data, set) else data) if data else CACHE_NO_VALUE)) return data except (KeyError, IndexError): - self.cache_set('{}:{}'.format(type, resource), CACHE_NO_VALUE) + self.cache_set(f'{type}:{resource}', CACHE_NO_VALUE) return {} diff --git a/intelmq/bots/experts/splunk_saved_search/expert.py b/intelmq/bots/experts/splunk_saved_search/expert.py index 7f755f202..94d85a0f7 100644 --- a/intelmq/bots/experts/splunk_saved_search/expert.py +++ b/intelmq/bots/experts/splunk_saved_search/expert.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- """Splunk saved search enrichment export bot SPDX-FileCopyrightText: 2020 Linköping University @@ -106,14 +105,14 @@ def init(self): self.set_request_parameters() - self.http_header.update({"Authorization": "Bearer {}".format(self.auth_token)}) + self.http_header.update({"Authorization": f"Bearer {self.auth_token}"}) self.session = utils.create_request_session(self) self.session.keep_alive = False def update_event(self, event, search_result): self.logger.info("Updating event: %s", - dict([(field, search_result[field]) for field in self.result_fields])) + {field: search_result[field] for field in self.result_fields}) for result, field in self.result_fields.items(): event.add(field, search_result[result], overwrite=self.overwrite) @@ -128,11 +127,11 @@ def process(self): return self.logger.debug("Received event, searching for %s", - dict([(parameter, event[field]) for field, parameter in self.search_parameters.items()])) + {parameter: event[field] for field, parameter in self.search_parameters.items()}) - query = '|savedsearch "{saved_search}"'.format(saved_search=self.saved_search) + query = f'|savedsearch "{self.saved_search}"' for field, parameter in self.search_parameters.items(): - query += ' "{parameter}"="{field}"'.format(parameter=parameter, field=event[field]) + query += f' "{parameter}"="{event[field]}"' if "limit" in self.multiple_result_handling: query += " | head 1" diff --git a/intelmq/bots/experts/threshold/expert.py b/intelmq/bots/experts/threshold/expert.py 
index d64d8cd54..6cef3ba34 100644 --- a/intelmq/bots/experts/threshold/expert.py +++ b/intelmq/bots/experts/threshold/expert.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- """Threshold value expert bot SPDX-FileCopyrightText: 2020 Linköping University diff --git a/intelmq/bots/experts/tor_nodes/expert.py b/intelmq/bots/experts/tor_nodes/expert.py index 31c0fbe85..34c5b0f08 100644 --- a/intelmq/bots/experts/tor_nodes/expert.py +++ b/intelmq/bots/experts/tor_nodes/expert.py @@ -36,7 +36,7 @@ def init(self): self._database.add(line) - except IOError: + except OSError: raise ValueError("TOR rule not defined or failed on open.") def process(self): @@ -82,11 +82,11 @@ def update_database(cls, verbose=False): bots[bot] = runtime_conf[bot]["parameters"]["database"] except KeyError as e: - sys.exit("Database update failed. Your configuration of {0} is missing key {1}.".format(bot, e)) + sys.exit(f"Database update failed. Your configuration of {bot} is missing key {e}.") if not bots: if verbose: - print("Database update skipped. No bots of type {0} present in runtime.conf.".format(__name__)) + print(f"Database update skipped. No bots of type {__name__} present in runtime.conf.") sys.exit(0) try: @@ -95,11 +95,11 @@ def update_database(cls, verbose=False): session = create_request_session() response = session.get("https://check.torproject.org/exit-addresses") except requests.exceptions.RequestException as e: - sys.exit("Database update failed. Connection Error: {0}".format(e)) + sys.exit(f"Database update failed. Connection Error: {e}") if response.status_code != 200: - sys.exit("Database update failed. Server responded: {0}.\n" - "URL: {1}".format(response.status_code, response.url)) + sys.exit("Database update failed. Server responded: {}.\n" + "URL: {}".format(response.status_code, response.url)) pattern = re.compile(r"ExitAddress ([^\s]+)") tor_exits = "\n".join(pattern.findall(response.text)) diff --git a/intelmq/bots/experts/truncate_by_delimiter/expert.py b/intelmq/bots/experts/truncate_by_delimiter/expert.py index 68e57fbda..668112e85 100644 --- a/intelmq/bots/experts/truncate_by_delimiter/expert.py +++ b/intelmq/bots/experts/truncate_by_delimiter/expert.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- """ Cut string if length is bigger than max diff --git a/intelmq/bots/experts/trusted_introducer_lookup/expert.py b/intelmq/bots/experts/trusted_introducer_lookup/expert.py index b49629a90..0e87cf92f 100644 --- a/intelmq/bots/experts/trusted_introducer_lookup/expert.py +++ b/intelmq/bots/experts/trusted_introducer_lookup/expert.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- """ Trusted Introducer Expert diff --git a/intelmq/bots/outputs/bro_file/output.py b/intelmq/bots/outputs/bro_file/output.py index 6e31fa2af..7479bdfb2 100644 --- a/intelmq/bots/outputs/bro_file/output.py +++ b/intelmq/bots/outputs/bro_file/output.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- """ Bro file output @@ -54,7 +53,7 @@ def open_file(self, filename: str = None): path = Path(os.path.dirname(filename)) try: path.mkdir(mode=0o755, parents=True, exist_ok=True) - except IOError: + except OSError: self.logger.exception('Directory %r could not be created.', path) self.stop() else: @@ -133,7 +132,7 @@ def check(parameters): path = Path(dirname) try: path.mkdir(mode=0o755, parents=True, exist_ok=True) - except IOError: + except OSError: return [ ["error", "Directory (%r) of parameter 'file' does not exist and could not be created." 
% dirname]] else: diff --git a/intelmq/bots/outputs/elasticsearch/output.py b/intelmq/bots/outputs/elasticsearch/output.py index 5b69c280a..85cc608c0 100644 --- a/intelmq/bots/outputs/elasticsearch/output.py +++ b/intelmq/bots/outputs/elasticsearch/output.py @@ -160,7 +160,7 @@ def get_index(self, event_dict: dict, default_date: datetime.date = None, # If no time available in the event and no default date is given, use the default catchall string event_date = event_date.strftime(ROTATE_OPTIONS.get(self.rotate_index)) if event_date else default_string - return "{}-{}".format(self.elastic_index, event_date) + return f"{self.elastic_index}-{event_date}" else: # If the bot should NOT rotate indices, just use the index name return self.elastic_index diff --git a/intelmq/bots/outputs/file/output.py b/intelmq/bots/outputs/file/output.py index d8dd37841..029154ef9 100644 --- a/intelmq/bots/outputs/file/output.py +++ b/intelmq/bots/outputs/file/output.py @@ -44,7 +44,7 @@ def open_file(self, filename: str = None): path = Path(os.path.dirname(filename)) try: path.mkdir(mode=0o755, parents=True, exist_ok=True) - except IOError: + except OSError: self.logger.exception('Directory %r could not be created.', path) self.stop() else: @@ -98,7 +98,7 @@ def check(parameters): path = Path(dirname) try: path.mkdir(mode=0o755, parents=True, exist_ok=True) - except IOError: + except OSError: return [["error", "Directory (%r) of parameter 'file' does not exist and could not be created." % dirname]] else: return [["info", "Directory (%r) of parameter 'file' did not exist, but has now been created." % dirname]] diff --git a/intelmq/bots/outputs/files/output.py b/intelmq/bots/outputs/files/output.py index 486a3cf34..ea4a61840 100644 --- a/intelmq/bots/outputs/files/output.py +++ b/intelmq/bots/outputs/files/output.py @@ -67,7 +67,7 @@ def create_unique_file(self): # Now we know the device/inode, rename to raise uniqueness within the whole filesystem newname = self._get_new_name(fd) os.rename(path.join(self.tmp, tmpname), path.join(self.tmp, newname)) - nf = io.open(fd, "w", encoding="utf-8") + nf = open(fd, "w", encoding="utf-8") return nf, newname def process(self): diff --git a/intelmq/bots/outputs/misp/output_api.py b/intelmq/bots/outputs/misp/output_api.py index ed7a35501..ff3ca1685 100644 --- a/intelmq/bots/outputs/misp/output_api.py +++ b/intelmq/bots/outputs/misp/output_api.py @@ -221,7 +221,7 @@ def _insert_misp_event(self, intelmq_event): if self.misp_publish: self.misp.publish(misp_event) self.logger.info( - 'Inserted new MISP event with id: {}'.format(misp_event.id)) + f'Inserted new MISP event with id: {misp_event.id}') @staticmethod def check(parameters): diff --git a/intelmq/bots/outputs/misp/output_feed.py b/intelmq/bots/outputs/misp/output_feed.py index e5e836552..7c730f976 100644 --- a/intelmq/bots/outputs/misp/output_feed.py +++ b/intelmq/bots/outputs/misp/output_feed.py @@ -99,7 +99,7 @@ def process(self): self.current_event.set_date(datetime.date.today()) self.current_event.Orgc = self.misp_org self.current_event.uuid = str(uuid4()) - self.current_file = self.output_dir / '{self.current_event.uuid}.json'.format(self=self) + self.current_file = self.output_dir / f'{self.current_event.uuid}.json' with (self.output_dir / '.current').open('w') as f: f.write(str(self.current_file)) @@ -127,7 +127,7 @@ def check(parameters): return [["error", "Parameter 'output_dir' not given."]] try: created = MISPFeedOutputBot.check_output_dir(parameters['output_dir']) - except IOError: + except OSError: return 
[["error", "Directory %r of parameter 'output_dir' does not exist and could not be created." % parameters['output_dir']]] else: diff --git a/intelmq/bots/outputs/mongodb/output.py b/intelmq/bots/outputs/mongodb/output.py index e1cfb1281..b4b473c60 100644 --- a/intelmq/bots/outputs/mongodb/output.py +++ b/intelmq/bots/outputs/mongodb/output.py @@ -78,7 +78,7 @@ def connect(self): db.authenticate(name=self.db_user, password=self.db_pass) except pymongo.errors.OperationFailure: - raise ValueError('Authentication to database {} failed'.format(self.database)) + raise ValueError(f'Authentication to database {self.database} failed') self._collection = db[self.collection] self.logger.info('Successfully connected to MongoDB server.') diff --git a/intelmq/bots/outputs/rpz_file/output.py b/intelmq/bots/outputs/rpz_file/output.py index 5d3d0adc7..e98bb9c9b 100644 --- a/intelmq/bots/outputs/rpz_file/output.py +++ b/intelmq/bots/outputs/rpz_file/output.py @@ -1,5 +1,3 @@ -# -*- coding: utf-8 -*- - """ RPZ file output @@ -82,7 +80,7 @@ def open_file(self, filename: str = None): path = Path(os.path.dirname(filename)) try: path.mkdir(mode=0o755, parents=True, exist_ok=True) - except IOError: + except OSError: self.logger.exception('Directory %r could not be created.', path) self.stop() else: @@ -154,7 +152,7 @@ def check(parameters): path = Path(dirname) try: path.mkdir(mode=0o755, parents=True, exist_ok=True) - except IOError: + except OSError: return [ ["error", "Directory (%r) of parameter 'file' does not exist and could not be created." % dirname]] else: diff --git a/intelmq/bots/outputs/sql/output.py b/intelmq/bots/outputs/sql/output.py index b976b1920..30c5f1976 100644 --- a/intelmq/bots/outputs/sql/output.py +++ b/intelmq/bots/outputs/sql/output.py @@ -38,7 +38,7 @@ def process(self): keys = '", "'.join(event.keys()) values = list(event.values()) - fvalues = len(values) * '{0}, '.format(self.format_char) + fvalues = len(values) * f'{self.format_char}, ' query = ('INSERT INTO {table} ("{keys}") VALUES ({values})' ''.format(table=self.table, keys=keys, values=fvalues[:-2])) diff --git a/intelmq/bots/outputs/tcp/output.py b/intelmq/bots/outputs/tcp/output.py index 991c92580..8d09d6df1 100644 --- a/intelmq/bots/outputs/tcp/output.py +++ b/intelmq/bots/outputs/tcp/output.py @@ -64,7 +64,7 @@ def process(self): time.sleep(1) else: break - except socket.error as e: + except OSError as e: self.logger.exception("Reconnecting, %s", e) self.con.close() self.connect() diff --git a/intelmq/bots/outputs/templated_smtp/output.py b/intelmq/bots/outputs/templated_smtp/output.py index 49ade3ece..4548df6bb 100644 --- a/intelmq/bots/outputs/templated_smtp/output.py +++ b/intelmq/bots/outputs/templated_smtp/output.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- """Templated SMTP output bot SPDX-FileCopyrightText: 2021 Linköping University diff --git a/intelmq/bots/parsers/anubisnetworks/parser.py b/intelmq/bots/parsers/anubisnetworks/parser.py index dfc6d243c..5b1be6c13 100644 --- a/intelmq/bots/parsers/anubisnetworks/parser.py +++ b/intelmq/bots/parsers/anubisnetworks/parser.py @@ -87,7 +87,7 @@ def process(self): if k in value: event[v] = value[k] if "ip" in value and "netmask" in value: - event.add('source.network', '%s/%s' % (value["ip"], value["netmask"])) + event.add('source.network', '{}/{}'.format(value["ip"], value["netmask"])) elif key == 'qtype': event['extra.dns_query_type'] = value elif key == 'app_proto': @@ -141,9 +141,7 @@ def process(self): raise ValueError("Unable to parse data field comm.http.%r. 
Please report this as bug." % subsubkey) try: event.add('destination.url', - '%s://%s%s' % (value['proto'], - subvalue['host'], - subvalue['path'])) + f"{value['proto']}://{subvalue['host']}{subvalue['path']}") except KeyError: pass elif subkey == 'dns': @@ -220,7 +218,7 @@ def parse_geo(self, event, value, namespace, raw_report, orig_name): elif subkey == "netmask": event = self.event_add_fallback(event, '%s.network' % namespace, - '%s/%s' % (value['ip'], subvalue)) + '{}/{}'.format(value['ip'], subvalue)) elif subkey == 'country_code': event = self.event_add_fallback(event, '%s.geolocation.cc' % namespace, @@ -232,7 +230,7 @@ def parse_geo(self, event, value, namespace, raw_report, orig_name): elif subkey in ('region_code', 'postal_code', "region", "city", "latitude", "longitude", "dma_code", "area_code", "metro_code"): - event = self.event_add_fallback(event, '%s.geolocation.%s' % (namespace, subkey), subvalue) + event = self.event_add_fallback(event, f'{namespace}.geolocation.{subkey}', subvalue) elif subkey == 'asn': event = self.event_add_fallback(event, '%s.asn' % namespace, subvalue) elif subkey == 'asn_name': diff --git a/intelmq/bots/parsers/cymru/parser_cap_program.py b/intelmq/bots/parsers/cymru/parser_cap_program.py index dbdd8eaf2..efae6d977 100644 --- a/intelmq/bots/parsers/cymru/parser_cap_program.py +++ b/intelmq/bots/parsers/cymru/parser_cap_program.py @@ -90,7 +90,7 @@ def parse_bot_old(self, comment_split, report_type, event): elif kind in ('destaddr', 'dstaddr'): event['destination.ip'] = value else: - raise ValueError('Unknown value in comment %r for report %r.' % (kind, report_type)) + raise ValueError(f'Unknown value in comment {kind!r} for report {report_type!r}.') if event_comment: event.add('event_description.text', ' '.join(event_comment)) @@ -255,10 +255,7 @@ def parse_line_new(self, line, report): if bogus: span = bogus.span() groups = bogus.groups() - notes = '%shostname: %s; port: %s%s' % (notes[:span[0]], - groups[0], - groups[1], - notes[span[1]:]) + notes = f'{notes[:span[0]]}hostname: {groups[0]}; port: {groups[1]}{notes[span[1]:]}' comment_split = list(filter(lambda x: x, notes.split(';'))) asninfo_split = asninfo.split(', ') @@ -299,7 +296,7 @@ def parse_line_new(self, line, report): pass else: break - raise ValueError('Unable to parse comment %r of category %r. Please report this.' % (comment, category)) + raise ValueError(f'Unable to parse comment {comment!r} of category {category!r}. Please report this.') key, value = comment.split(':', 1) key = key.strip() value = value.strip() @@ -338,7 +335,7 @@ def parse_line_new(self, line, report): elif key == 'additional_asns': event['extra.source.asns'] = [event['source.asn']] + list(map(int, value.split(','))) else: - raise ValueError('Unknown key %r in comment of category %r. Please report this.' % (key, category)) + raise ValueError(f'Unknown key {key!r} in comment of category {category!r}. 
Please report this.') for destination_port in destination_ports: ev = self.new_event(event) ev['destination.port'] = destination_port diff --git a/intelmq/bots/parsers/cznic/parser_proki.py b/intelmq/bots/parsers/cznic/parser_proki.py index f55cbafb7..3ce71c3f8 100644 --- a/intelmq/bots/parsers/cznic/parser_proki.py +++ b/intelmq/bots/parsers/cznic/parser_proki.py @@ -22,8 +22,7 @@ def parse(self, report): # extract event list from received JSON report = report.get("data") - for line in report: - yield line + yield from report def parse_line(self, line, report): event = self.new_event(report) diff --git a/intelmq/bots/parsers/dshield/parser_block.py b/intelmq/bots/parsers/dshield/parser_block.py index 6331576d8..2a26f3cfa 100644 --- a/intelmq/bots/parsers/dshield/parser_block.py +++ b/intelmq/bots/parsers/dshield/parser_block.py @@ -55,7 +55,7 @@ def process(self): network_ip = values[0] network_mask = values[2] - network = '%s/%s' % (network_ip, network_mask) + network = f'{network_ip}/{network_mask}' extra = {} event = self.new_event(report) diff --git a/intelmq/bots/parsers/github_feed/parser.py b/intelmq/bots/parsers/github_feed/parser.py index 0fcfdd79e..341f739d7 100644 --- a/intelmq/bots/parsers/github_feed/parser.py +++ b/intelmq/bots/parsers/github_feed/parser.py @@ -42,7 +42,7 @@ def process(self): try: decoded_content = json.loads(base64_decode(report['raw']).replace("'", '"')) except json.JSONDecodeError as e: - self.logger.error("Invalid report['raw']: {}".format(e)) + self.logger.error(f"Invalid report['raw']: {e}") self.acknowledge_message() return @@ -132,7 +132,7 @@ def parse_domain_indicator(event, ioc_indicator: str): def parse_hash_indicator(event, ioc_indicator: str, hash_type: str): - event.add('malware.hash.{}'.format(hash_type), ioc_indicator) + event.add(f'malware.hash.{hash_type}', ioc_indicator) event.change('classification.taxonomy', 'other') event.change('classification.type', 'malware') return event diff --git a/intelmq/bots/parsers/key_value/parser.py b/intelmq/bots/parsers/key_value/parser.py index 62ecf1dab..61beaec3c 100644 --- a/intelmq/bots/parsers/key_value/parser.py +++ b/intelmq/bots/parsers/key_value/parser.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- """Parse a string of key=value pairs. 
SPDX-FileCopyrightText: 2020 Linköping University diff --git a/intelmq/bots/parsers/microsoft/parser_ctip.py b/intelmq/bots/parsers/microsoft/parser_ctip.py index a18177ed9..1a44ebb98 100644 --- a/intelmq/bots/parsers/microsoft/parser_ctip.py +++ b/intelmq/bots/parsers/microsoft/parser_ctip.py @@ -275,7 +275,7 @@ def parse_azure(self, line, report): del line[key] if isinstance(value, dict): for subkey, subvalue in value.items(): - line['%s.%s' % (key, subkey)] = subvalue + line[f'{key}.{subkey}'] = subvalue del line[key] for key, value in line.items(): if key == 'ThreatConfidence': diff --git a/intelmq/bots/parsers/shadowserver/parser.py b/intelmq/bots/parsers/shadowserver/parser.py index 67dda9b3d..a34efa96d 100644 --- a/intelmq/bots/parsers/shadowserver/parser.py +++ b/intelmq/bots/parsers/shadowserver/parser.py @@ -67,10 +67,10 @@ def parse(self, report): filename_search = self.__is_filename_regex.search(self.report_name) if not filename_search: - raise ValueError("Report's 'extra.file_name' {!r} is not valid.".format(self.report_name)) + raise ValueError(f"Report's 'extra.file_name' {self.report_name!r} is not valid.") else: self.report_name = filename_search.group(1) - self.logger.debug("Detected report's file name: {!r}.".format(self.report_name)) + self.logger.debug(f"Detected report's file name: {self.report_name!r}.") retval = config.get_feed_by_filename(self.report_name) if not retval: diff --git a/intelmq/bots/parsers/shadowserver/parser_json.py b/intelmq/bots/parsers/shadowserver/parser_json.py index 4d4994498..893ad877b 100644 --- a/intelmq/bots/parsers/shadowserver/parser_json.py +++ b/intelmq/bots/parsers/shadowserver/parser_json.py @@ -47,7 +47,7 @@ def parse(self, report): filename_search = self.__is_filename_regex.search(report_name) if not filename_search: - raise ValueError("Report's 'extra.file_name' {!r} is not valid.".format(report_name)) + raise ValueError(f"Report's 'extra.file_name' {report_name!r} is not valid.") report_name = filename_search.group(1) self.logger.debug("Detected report's file name: %s.", report_name) diff --git a/intelmq/bots/parsers/shodan/parser.py b/intelmq/bots/parsers/shodan/parser.py index 98c06a8b5..5b7a9642b 100644 --- a/intelmq/bots/parsers/shodan/parser.py +++ b/intelmq/bots/parsers/shodan/parser.py @@ -497,24 +497,24 @@ def _dict_dict_to_obj_list(x: Dict[str, Dict[str, Any]], identifier: str = 'iden return out -def _get_first(l: List[Any]) -> Any: +def _get_first(variable: List[Any]) -> Any: ''' get first element from list, if the list has any; raise NoValueException otherwise ''' try: - return l[0] + return variable[0] except IndexError: raise NoValueException(f'empty list passed to _get_first') -def _get_first_fqdn(l: List[str]) -> str: +def _get_first_fqdn(variable: List[str]) -> str: ''' get first valid FQDN from a list of strings ''' - valid_fqdns = (hostname for hostname in l if harmonization.FQDN.is_valid(hostname, sanitize=True)) + valid_fqdns = (hostname for hostname in variable if harmonization.FQDN.is_valid(hostname, sanitize=True)) first = next(valid_fqdns, None) if first is None: - raise NoValueException(f'no valid FQDN in {l!r} passed to _get_first_fqdn') + raise NoValueException(f'no valid FQDN in {variable!r} passed to _get_first_fqdn') return first diff --git a/intelmq/bots/parsers/taichung/parser.py b/intelmq/bots/parsers/taichung/parser.py index 952e8a7dc..afbd31164 100644 --- a/intelmq/bots/parsers/taichung/parser.py +++ b/intelmq/bots/parsers/taichung/parser.py @@ -48,8 +48,7 @@ def get_type(self, value): def 
parse(self, report): raw_report = utils.base64_decode(report.get("raw")) - for row in raw_report.split(''): - yield row + yield from raw_report.split('') def parse_line(self, row, report): # Get IP Address and Type diff --git a/intelmq/lib/bot.py b/intelmq/lib/bot.py index 214a8a571..2bfe56756 100644 --- a/intelmq/lib/bot.py +++ b/intelmq/lib/bot.py @@ -47,7 +47,7 @@ IGNORED_SYSTEM_PARAMETERS = {'groupname', 'bot_id', 'parameters'} -class Bot(object): +class Bot: """ Not to be reset when initialized again on reload. """ __current_message: Optional[libmessage.Message] = None __message_counter_delay: timedelta = timedelta(seconds=2) @@ -994,8 +994,7 @@ def parse_json(self, report: libmessage.Report): A basic JSON parser. Assumes a *list* of objects as input to be yield. """ raw_report: str = utils.base64_decode(report.get("raw")) - for line in json.loads(raw_report): - yield line + yield from json.loads(raw_report) def parse_json_stream(self, report: libmessage.Report): """ @@ -1309,5 +1308,5 @@ def export_event(self, event: libmessage.Event, return retval -class Parameters(object): +class Parameters: pass diff --git a/intelmq/lib/bot_debugger.py b/intelmq/lib/bot_debugger.py index 1e8e34b8f..0afeaa5c1 100644 --- a/intelmq/lib/bot_debugger.py +++ b/intelmq/lib/bot_debugger.py @@ -79,7 +79,7 @@ def run(self) -> str: elif self.run_subcommand == "process": self._process(self.dryrun, self.msg, self.show) else: - self.outputappend("Subcommand {} not known.".format(self.run_subcommand)) + self.outputappend(f"Subcommand {self.run_subcommand} not known.") return '\n'.join(self.output) or "" @@ -92,7 +92,7 @@ def _console(self, console_type): pass else: if console_type and console != console_type: - print("Console {} not available.".format(console_type)) + print(f"Console {console_type} not available.") print("*** Using console {}. Please use 'self' to access to the bot instance properties." "You may exit the console by 'c' command (like continue). ***" .format(module.__name__)) @@ -172,9 +172,9 @@ def arg2msg(self, msg): msg = MessageFactory.unserialize(msg, default_type=default_type) except (Exception, KeyError, TypeError, ValueError) as exc: if exists(msg): - with open(msg, "r") as f: + with open(msg) as f: return self.arg2msg(f.read()) - self.messageWizzard("Message can not be parsed from JSON: {}".format(error_message_from_exc(exc))) + self.messageWizzard(f"Message can not be parsed from JSON: {error_message_from_exc(exc)}") sys.exit(1) return msg diff --git a/intelmq/lib/exceptions.py b/intelmq/lib/exceptions.py index 66878be63..5c8230d8d 100644 --- a/intelmq/lib/exceptions.py +++ b/intelmq/lib/exceptions.py @@ -30,15 +30,15 @@ class InvalidArgument(IntelMQException): def __init__(self, argument: Any, got: Any = None, expected=None, docs: str = None): - message = "Argument {} is invalid.".format(repr(argument)) + message = f"Argument {repr(argument)} is invalid." if expected is list: - message += " Should be one of: {}.".format(list) + message += f" Should be one of: {list}." elif expected: # not None - message += " Should be of type: {}.".format(expected) + message += f" Should be of type: {expected}." if got: - message += " Got {}.".format(repr(got)) + message += f" Got {repr(got)}." 
         if docs:
-            message += " For more information see {}".format(docs)
+            message += f" For more information see {docs}"
         super().__init__(message)
@@ -52,7 +52,7 @@ def __init__(self, argument: Union[str, Exception]):


 class ConfigurationError(IntelMQException):

     def __init__(self, config: str, argument: str):
-        message = "%s configuration failed - %s" % (config, argument)
+        message = f"{config} configuration failed - {argument}"
         super().__init__(message)
@@ -140,7 +140,7 @@ def __init__(self, dependency: str, version: Optional[str] = None,
                 appendix = appendix + (" Installed is version {installed!r}."
                                        "".format(installed=installed))
             if additional_text:
-                appendix = "%s %s" % (appendix, additional_text)
+                appendix = f"{appendix} {additional_text}"
             message = ("Could not load dependency {dependency!r}, please install it "
                        "with apt/yum/dnf/zypper (possibly named "
                        "python3-{dependency}) or pip3.{appendix}"
diff --git a/intelmq/lib/harmonization.py b/intelmq/lib/harmonization.py
index f73e9d16f..1b48236f9 100644
--- a/intelmq/lib/harmonization.py
+++ b/intelmq/lib/harmonization.py
@@ -53,7 +53,7 @@
 ]


-class GenericType(object):
+class GenericType:

     @staticmethod
     def is_valid(value: str, sanitize: bool = False) -> bool:
@@ -856,10 +856,10 @@ def sanitize(value: Union[int, str]) -> Optional[str]:
     def to_int(value: str) -> Optional[int]:
         try:
             ip_integer = socket.inet_pton(socket.AF_INET, value)
-        except socket.error:
+        except OSError:
             try:
                 ip_integer = socket.inet_pton(socket.AF_INET6, value)
-            except socket.error:
+            except OSError:
                 return None

         ip_integer = int(binascii.hexlify(ip_integer), 16)
diff --git a/intelmq/lib/message.py b/intelmq/lib/message.py
index 918b7a99a..69137209a 100644
--- a/intelmq/lib/message.py
+++ b/intelmq/lib/message.py
@@ -26,7 +26,7 @@
 HARMONIZATION_KEY_FORMAT = re.compile(r'^[a-z_][a-z_0-9]+(\.[a-z_0-9]+)*$')


-class MessageFactory(object):
+class MessageFactory:
     """
     unserialize: JSON encoded message to object
     serialize: object to JSON encoded object
@@ -275,7 +275,7 @@ def add(self, key: str, value: str, sanitize: bool = True,
                         continue
                     if key != 'extra' and extravalue in self._IGNORED_VALUES:
                         continue
-                    super().__setitem__('{}.{}'.format(key, extrakey),
+                    super().__setitem__(f'{key}.{extrakey}',
                                         extravalue)
         else:
             super().__setitem__(key, value)
diff --git a/intelmq/lib/mixins/sql.py b/intelmq/lib/mixins/sql.py
index 7a2bcf2f1..a7e45b4f5 100644
--- a/intelmq/lib/mixins/sql.py
+++ b/intelmq/lib/mixins/sql.py
@@ -34,7 +34,7 @@ def __init__(self, *args, **kwargs):
                 self.format_char = val[1]
                 break
         else:
-            raise ValueError("Wrong parameter 'engine' {0!r}, possible values are {1}".format(self.engine_name, engines))
+            raise ValueError(f"Wrong parameter 'engine' {self.engine_name!r}, possible values are {engines}")

         super().__init__()
diff --git a/intelmq/lib/pipeline.py b/intelmq/lib/pipeline.py
index 15b5ff7b6..cf58b453c 100644
--- a/intelmq/lib/pipeline.py
+++ b/intelmq/lib/pipeline.py
@@ -26,7 +26,7 @@
 pika = None


-class PipelineFactory(object):
+class PipelineFactory:

     @staticmethod
     def create(logger, broker=None, direction=None, queues=None, pipeline_args=None, load_balance=False, is_multithreaded=False):
@@ -67,7 +67,7 @@ def create(logger, broker=None, direction=None, queues=None, pipeline_args=None,
         return pipe


-class Pipeline(object):
+class Pipeline:
     has_internal_queues = False
     # If the class currently holds a message, restricts the actions
     _has_message = False
@@ -245,7 +245,7 @@ def send(self, message: str, path: str = "_default",
                     "OOM command not allowed when used memory > 'maxmemory'." in exc.args[0]:
                 raise MemoryError(exc.args[0])
             elif 'Redis is configured to save RDB snapshots, but is currently not able to persist on disk' in exc.args[0]:
-                raise IOError(28, 'No space left on device or in memory. Redis can\'t save its snapshots. '
+                raise OSError(28, 'No space left on device or in memory. Redis can\'t save its snapshots. '
                               'Look at redis\'s logs.')
             raise exceptions.PipelineError(exc)
@@ -423,24 +423,24 @@ class Amqp(Pipeline):
     intelmqctl_rabbitmq_monitoring_url = None

     def __init__(self, logger, pipeline_args: dict = None, load_balance=False, is_multithreaded=False):
-        super(Amqp, self).__init__(logger, pipeline_args, load_balance, is_multithreaded)
+        super().__init__(logger, pipeline_args, load_balance, is_multithreaded)
         if pika is None:
             raise ValueError("To use AMQP you must install the 'pika' library.")
         self.properties = pika.BasicProperties(delivery_mode=2)  # message persistence

     def load_configurations(self, queues_type):
-        self.host = self.pipeline_args.get("{}_pipeline_host".format(queues_type), "10.0.0.1")
-        self.port = self.pipeline_args.get("{}_pipeline_port".format(queues_type), 5672)
-        self.username = self.pipeline_args.get("{}_pipeline_username".format(queues_type), None)
-        self.password = self.pipeline_args.get("{}_pipeline_password".format(queues_type), None)
+        self.host = self.pipeline_args.get(f"{queues_type}_pipeline_host", "10.0.0.1")
+        self.port = self.pipeline_args.get(f"{queues_type}_pipeline_port", 5672)
+        self.username = self.pipeline_args.get(f"{queues_type}_pipeline_username", None)
+        self.password = self.pipeline_args.get(f"{queues_type}_pipeline_password", None)
         # socket_timeout is None by default, which means no timeout
-        self.socket_timeout = self.pipeline_args.get("{}_pipeline_socket_timeout".format(queues_type),
+        self.socket_timeout = self.pipeline_args.get(f"{queues_type}_pipeline_socket_timeout",
                                                      None)
         self.load_balance = self.pipeline_args.get("load_balance", False)
-        self.virtual_host = self.pipeline_args.get("{}_pipeline_amqp_virtual_host".format(queues_type),
+        self.virtual_host = self.pipeline_args.get(f"{queues_type}_pipeline_amqp_virtual_host",
                                                    '/')
-        self.ssl = self.pipeline_args.get("{}_pipeline_ssl".format(queues_type), False)
-        self.exchange = self.pipeline_args.get("{}_pipeline_amqp_exchange".format(queues_type), "")
+        self.ssl = self.pipeline_args.get(f"{queues_type}_pipeline_ssl", False)
+        self.exchange = self.pipeline_args.get(f"{queues_type}_pipeline_amqp_exchange", "")
         self.load_balance_iterator = 0
         self.kwargs = {}
         if self.username and self.password:
@@ -509,7 +509,7 @@ def disconnect(self):

     def set_queues(self, queues: dict, queues_type: str):
         self.load_configurations(queues_type)
-        super(Amqp, self).set_queues(queues, queues_type)
+        super().set_queues(queues, queues_type)

     def _send(self, destination_queue, message, reconnect=True):
         self.check_connection()
diff --git a/intelmq/lib/processmanager.py b/intelmq/lib/processmanager.py
index a4fd12326..547a15b54 100644
--- a/intelmq/lib/processmanager.py
+++ b/intelmq/lib/processmanager.py
@@ -296,7 +296,7 @@ def bot_status(self, bot_id, *, proc=None):
     def __check_pid(self, bot_id):
         filename = self.PIDFILE.format(bot_id)
         if os.path.isfile(filename):
-            with open(filename, 'r') as fp:
+            with open(filename) as fp:
                 pid = fp.read()
             try:
                 return int(pid.strip())
@@ -554,10 +554,10 @@ def connect(self):
             self.sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
             self.sock.connect(self.host)

-    class UnixStreamTransport(xmlrpc.client.Transport, object):
+    class UnixStreamTransport(xmlrpc.client.Transport):
         def __init__(self, socket_path):
             self.socket_path = socket_path
-            super(UnixStreamTransport, self).__init__()
+            super().__init__()

         def make_connection(self, host):
             return UnixStreamHTTPConnection(self.socket_path)
@@ -566,7 +566,7 @@ def make_connection(self, host):
         socket_path = os.environ.get("SUPERVISOR_SOCKET", self.DEFAULT_SOCKET_PATH)
         if not os.path.exists(socket_path):
-            self._abort("Socket '{}' does not exists. Is supervisor running?".format(socket_path))
+            self._abort(f"Socket '{socket_path}' does not exists. Is supervisor running?")

         if not os.access(socket_path, os.W_OK):
             current_user = getpass.getuser()
@@ -591,7 +591,7 @@ def make_connection(self, host):
             supervisor_state = self.__supervisor_xmlrpc.supervisor.getState()["statename"]
             if supervisor_state != "RUNNING":
-                raise Exception("Unexpected supervisor state {}".format(supervisor_state))
+                raise Exception(f"Unexpected supervisor state {supervisor_state}")

             try:
                 self.__supervisor_xmlrpc.twiddler.getAPIVersion()
@@ -608,7 +608,7 @@ def make_connection(self, host):
         return self.__supervisor_xmlrpc

     def _process_name(self, bot_id: str) -> str:
-        return "{}:{}".format(self.SUPERVISOR_GROUP, bot_id)
+        return f"{self.SUPERVISOR_GROUP}:{bot_id}"

     def _abort(self, message: str):
         if self._interactive:
diff --git a/intelmq/lib/splitreports.py b/intelmq/lib/splitreports.py
index 7f374f252..e115893c4 100644
--- a/intelmq/lib/splitreports.py
+++ b/intelmq/lib/splitreports.py
@@ -111,8 +111,7 @@ def read_delimited_chunks(infile: BinaryIO, chunk_size: int) -> Generator[bytes,
         if chunks:
             leftover = chunks[-1]
             chunks = chunks[:-1]
-        for chunk in chunks:
-            yield chunk
+        yield from chunks

         if not new_chunk:
             if leftover:
diff --git a/intelmq/lib/test.py b/intelmq/lib/test.py
index cbf0ae01e..6c0091d9b 100644
--- a/intelmq/lib/test.py
+++ b/intelmq/lib/test.py
@@ -46,7 +46,7 @@
 }


-class Parameters(object):
+class Parameters:
     pass
@@ -63,7 +63,7 @@ def mocked(conf_file):
             confname = os.path.join('etc/', os.path.split(conf_file)[-1])
             fname = pkg_resources.resource_filename('intelmq', confname)
-            with open(fname, 'rt') as fpconfig:
+            with open(fname) as fpconfig:
                 return json.load(fpconfig)
         else:
             return utils.load_configuration(conf_file)
@@ -105,7 +105,7 @@ def skip_build_environment():
     return unittest.skipIf(os.getenv('USER') == 'abuild', 'Test disabled in Build Service.')


-class BotTestCase(object):
+class BotTestCase:
     """
     Provides common tests and assert methods for bot testing.
     """
@@ -199,12 +199,11 @@ def prepare_bot(self, parameters={}, destination_queues=None):
         """
         self.log_stream = io.StringIO()

-        src_name = "{}-input".format(self.bot_id)
+        src_name = f"{self.bot_id}-input"
         if not destination_queues:
-            destination_queues = {"_default": "{}-output".format(self.bot_id)}
+            destination_queues = {"_default": f"{self.bot_id}-output"}
         else:
-            destination_queues = {queue_name: "%s-%s-output" % (self.bot_id,
-                                                                queue_name.strip('_'))
+            destination_queues = {queue_name: f"{self.bot_id}-{queue_name.strip('_')}-output"
                                   for queue_name in destination_queues}

         config = BOT_CONFIG.copy()
@@ -283,7 +282,7 @@ def test_static_bot_check_method(self, *args, **kwargs):
                 self.assertNotEqual(check[0].upper(), 'ERROR',
                                     '%s.check returned the error %r.'
                                     '' % (self.bot_name, check[1]))
-            raise ValueError('checks is %r' % (checks, ))
+            raise ValueError(f'checks is {checks!r}')

     def run_bot(self, iterations: int = 1, error_on_pipeline: bool = False,
                 prepare=True, parameters={},
@@ -424,7 +423,7 @@ def test_bot_name(self, *args, **kwargs):
         for type_name, type_match in self.bot_types.items():
             try:
                 self.assertRegex(self.bot_name,
-                                 r'\A[a-zA-Z0-9]+{}\Z'.format(type_match))
+                                 fr'\A[a-zA-Z0-9]+{type_match}\Z')
             except AssertionError:
                 counter += 1
         if counter != len(self.bot_types) - 1:
diff --git a/intelmq/lib/upgrades.py b/intelmq/lib/upgrades.py
index a24b94c29..83dd28923 100644
--- a/intelmq/lib/upgrades.py
+++ b/intelmq/lib/upgrades.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 """
 © 2020 Sebastian Wagner

diff --git a/intelmq/lib/utils.py b/intelmq/lib/utils.py
index b88500c4c..d8396bcd7 100644
--- a/intelmq/lib/utils.py
+++ b/intelmq/lib/utils.py
@@ -80,7 +80,7 @@
 RESPONSE_FILENAME = re.compile("filename=(.+)")


-class Parameters(object):
+class Parameters:
     pass
@@ -213,7 +213,7 @@ def load_configuration(configuration_filepath: str) -> dict:
         ValueError: if file not found
     """
     if os.path.exists(configuration_filepath):
-        with open(configuration_filepath, 'r') as fpconfig:
+        with open(configuration_filepath) as fpconfig:
             try:
                 config = yaml.load(fpconfig)
             except ScannerError as exc:
@@ -380,7 +380,7 @@ def log(name: str, log_path: Union[str, bool] = intelmq.DEFAULT_LOGGING_PATH,
         logging_level_stream = log_level

     if log_path and not syslog:
-        handler = RotatingFileHandler("%s/%s.log" % (log_path, name),
+        handler = RotatingFileHandler(f"{log_path}/{name}.log",
                                       maxBytes=log_max_size if log_max_size else 0,
                                       backupCount=log_max_copies)
         handler.setLevel(log_level)
@@ -616,7 +616,7 @@ def unzip(file: bytes, extract_files: Union[bool, list], logger=None,
             if filename in extract_files)


-class RewindableFileHandle(object):
+class RewindableFileHandle:
     """
     Can be used for easy retrieval of last input line to populate raw field
     during CSV parsing.
@@ -668,7 +668,7 @@ def seconds_to_human(seconds: int, precision: int = 0) -> str:
     result = []
     for frame in ('days', 'hours', 'minutes', 'seconds'):
         if getattr(relative, frame):
-            result.append('%.{}f%s'.format(precision) % (getattr(relative, frame), frame[0]))
+            result.append(f'%.{precision}f%s' % (getattr(relative, frame), frame[0]))

     return ' '.join(result)
diff --git a/intelmq/tests/bots/experts/aggregate/test_expert.py b/intelmq/tests/bots/experts/aggregate/test_expert.py
index f8d1b246e..eb788837c 100644
--- a/intelmq/tests/bots/experts/aggregate/test_expert.py
+++ b/intelmq/tests/bots/experts/aggregate/test_expert.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 """
 Testing event aggregation

diff --git a/intelmq/tests/bots/experts/domain_valid/test_expert.py b/intelmq/tests/bots/experts/domain_valid/test_expert.py
index 9d9b3c9d1..9119b6347 100644
--- a/intelmq/tests/bots/experts/domain_valid/test_expert.py
+++ b/intelmq/tests/bots/experts/domain_valid/test_expert.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 """
 Domain validator

diff --git a/intelmq/tests/bots/experts/remove_affix/test_expert.py b/intelmq/tests/bots/experts/remove_affix/test_expert.py
index 8b0c229c5..38403272f 100644
--- a/intelmq/tests/bots/experts/remove_affix/test_expert.py
+++ b/intelmq/tests/bots/experts/remove_affix/test_expert.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 """
 Remove affix - String cut from string

diff --git a/intelmq/tests/bots/experts/sieve/test_expert.py b/intelmq/tests/bots/experts/sieve/test_expert.py
index 6253bff60..ef6c4e842 100644
--- a/intelmq/tests/bots/experts/sieve/test_expert.py
+++ b/intelmq/tests/bots/experts/sieve/test_expert.py
@@ -3,7 +3,6 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later

 # -*- coding: utf-8 -*-
-from __future__ import unicode_literals

 import unittest
 import os
diff --git a/intelmq/tests/bots/experts/truncate_by_delimiter/test_expert.py b/intelmq/tests/bots/experts/truncate_by_delimiter/test_expert.py
index f25ee83b7..34c8ffa35 100644
--- a/intelmq/tests/bots/experts/truncate_by_delimiter/test_expert.py
+++ b/intelmq/tests/bots/experts/truncate_by_delimiter/test_expert.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 """
 Testing truncate by delimiter bot

diff --git a/intelmq/tests/bots/experts/trusted_introducer_lookup/test_expert.py b/intelmq/tests/bots/experts/trusted_introducer_lookup/test_expert.py
index 3148079e8..49f1d11c5 100644
--- a/intelmq/tests/bots/experts/trusted_introducer_lookup/test_expert.py
+++ b/intelmq/tests/bots/experts/trusted_introducer_lookup/test_expert.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 """
 Testing trusted introducer Expert

diff --git a/intelmq/tests/bots/outputs/bro_file/test_output.py b/intelmq/tests/bots/outputs/bro_file/test_output.py
index bfab0e669..d62a76e91 100644
--- a/intelmq/tests/bots/outputs/bro_file/test_output.py
+++ b/intelmq/tests/bots/outputs/bro_file/test_output.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 """
 Bro file output

diff --git a/intelmq/tests/bots/outputs/rpz_file/test_output.py b/intelmq/tests/bots/outputs/rpz_file/test_output.py
index d46b7629a..f1faf3a85 100644
--- a/intelmq/tests/bots/outputs/rpz_file/test_output.py
+++ b/intelmq/tests/bots/outputs/rpz_file/test_output.py
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 """
 RPZ file output

diff --git a/intelmq/tests/bots/outputs/templated_smtp/test_output.py b/intelmq/tests/bots/outputs/templated_smtp/test_output.py
index b7090dd46..57b84fff7 100644
--- a/intelmq/tests/bots/outputs/templated_smtp/test_output.py
+++ b/intelmq/tests/bots/outputs/templated_smtp/test_output.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 # SPDX-FileCopyrightText: 2021 Linköping University
 # SPDX-License-Identifier: AGPL-3.0-or-later

@@ -87,7 +86,7 @@ def test_malformed_attachment_spec(self):
              unittest.mock.patch('smtplib.SMTP.close'):
             self.run_bot(allowed_error_count=1)
         self.sysconfig["attachments"] = saved_attachments
-        self.assertRegexpMatches(self.loglines_buffer,
+        self.assertRegex(self.loglines_buffer,
                                  "ERROR - Attachment does not have a text, ignoring:")

     def test_event(self):
diff --git a/intelmq/tests/bots/parsers/html_table/test_feodotracker.py b/intelmq/tests/bots/parsers/html_table/test_feodotracker.py
index fd4bd6950..7f66eecd6 100644
--- a/intelmq/tests/bots/parsers/html_table/test_feodotracker.py
+++ b/intelmq/tests/bots/parsers/html_table/test_feodotracker.py
@@ -19,7 +19,7 @@
 }

-line1 = '%s/%s' % (REPORT_LINES[94][:-14], REPORT_LINES[94][-14:])
+line1 = f'{REPORT_LINES[94][:-14]}/{REPORT_LINES[94][-14:]}'
 EVENT1 = {"raw": utils.base64_encode(line1.strip()),
           "__type": "Event",
           "time.source": "2021-05-10T14:56:05+00:00",
@@ -30,7 +30,7 @@
           "source.geolocation.cc": "GB",
           "status": "Online",
           }
-line2 = '%s/%s' % (REPORT_LINES[95][:-14], REPORT_LINES[95][-14:])
+line2 = f'{REPORT_LINES[95][:-14]}/{REPORT_LINES[95][-14:]}'
 EVENT2 = {"raw": utils.base64_encode(line2.strip()),
           "__type": "Event",
           "time.source": "2021-05-10T14:56:04+00:00",
diff --git a/intelmq/tests/bots/parsers/openphish/test_parser_commercial.py b/intelmq/tests/bots/parsers/openphish/test_parser_commercial.py
index 83a2379ad..c2bc6cbab 100644
--- a/intelmq/tests/bots/parsers/openphish/test_parser_commercial.py
+++ b/intelmq/tests/bots/parsers/openphish/test_parser_commercial.py
@@ -10,7 +10,7 @@
 import intelmq.lib.utils as utils
 from intelmq.bots.parsers.openphish.parser_commercial import OpenPhishCommercialParserBot

-with open(os.path.join(os.path.dirname(__file__), 'feed_commercial.txt'), 'r') as fh:
+with open(os.path.join(os.path.dirname(__file__), 'feed_commercial.txt')) as fh:
     FILE = fh.read()
 RAW = utils.base64_encode(FILE.encode())
 SPLIT = FILE.splitlines()
diff --git a/intelmq/tests/bots/parsers/shadowserver/test_testdata.py b/intelmq/tests/bots/parsers/shadowserver/test_testdata.py
index 939e414f7..19cbdd7d7 100644
--- a/intelmq/tests/bots/parsers/shadowserver/test_testdata.py
+++ b/intelmq/tests/bots/parsers/shadowserver/test_testdata.py
@@ -19,7 +19,7 @@
 def csvtojson(csvfile):
     datalist = []
-    with open(csvfile, 'r') as fop:
+    with open(csvfile) as fop:
         reader = csv.DictReader(fop, restval="")

         for row in reader:
@@ -36,12 +36,12 @@ def csvtojson(csvfile):
     CSVREPORTS[shortname] = {"raw": utils.base64_encode(EXAMPLE_FILE),
                              "__type": "Report",
                              "time.observation": "2015-01-01T00:00:00+00:00",
-                             "extra.file_name": "2019-01-01-{}-test-test.csv".format(shortname),
+                             "extra.file_name": f"2019-01-01-{shortname}-test-test.csv",
                              }
     JSONREPORTS[shortname] = {"raw": utils.base64_encode(csvtojson(filename)),
                               "__type": "Report",
                               "time.observation": "2015-01-01T00:00:00+00:00",
-                              "extra.file_name": "2019-01-01-{}-test-test.json".format(shortname),
+                              "extra.file_name": f"2019-01-01-{shortname}-test-test.json",
                               }
diff --git a/intelmq/tests/bots/parsers/shodan/test_parser.py b/intelmq/tests/bots/parsers/shodan/test_parser.py
index 67bcd16f6..4c48f1a21 100644
--- a/intelmq/tests/bots/parsers/shodan/test_parser.py
+++ b/intelmq/tests/bots/parsers/shodan/test_parser.py
@@ -11,7 +11,7 @@
 import intelmq.lib.test as test
 from intelmq.bots.parsers.shodan.parser import ShodanParserBot

-with open(os.path.join(os.path.dirname(__file__), 'tests.json'), 'rt') as fh:
+with open(os.path.join(os.path.dirname(__file__), 'tests.json')) as fh:
     RAWS_UNENCODED = fh.read().splitlines()
 RAWS = [base64.b64encode(x.encode()).decode() for x in RAWS_UNENCODED]
diff --git a/intelmq/tests/bots/parsers/twitter/test_parser.py b/intelmq/tests/bots/parsers/twitter/test_parser.py
index 4a2b26d42..372bb6ccc 100644
--- a/intelmq/tests/bots/parsers/twitter/test_parser.py
+++ b/intelmq/tests/bots/parsers/twitter/test_parser.py
@@ -71,9 +71,8 @@ def set_bot(cls):
         cls.sysconfig = {"substitutions" : " .net;.net;[.];.;,;.",
                          "classification_type": "blacklist",
                          }
-        if sys.version_info >= (3, 6, 0):
-            # url-normalize 1.4.1 supporting this parameter is only available for 3.6
-            cls.sysconfig["default_scheme"] = "http"
+        # url-normalize 1.4.1 supporting this parameter is only available for 3.6
+        cls.sysconfig["default_scheme"] = "http"

     def test_parse(self):
         self.input_message = REPORT
diff --git a/intelmq/tests/lib/test_message.py b/intelmq/tests/lib/test_message.py
index 1454802db..9f9c2ddb4 100644
--- a/intelmq/tests/lib/test_message.py
+++ b/intelmq/tests/lib/test_message.py
@@ -583,7 +583,7 @@ def test_message_from_dict_return_type(self):
         event_type = type(message.MessageFactory.from_dict(event, harmonization=HARM))
         self.assertTrue(event_type is message.Event,
-                        msg='Type is {} instead of Event.'.format(event_type))
+                        msg=f'Type is {event_type} instead of Event.')

     def test_event_init_check(self):
         """ Test if initialization method checks fields. """
diff --git a/intelmq/tests/lib/test_upgrades.py b/intelmq/tests/lib/test_upgrades.py
index c572f4061..8a6e13b21 100644
--- a/intelmq/tests/lib/test_upgrades.py
+++ b/intelmq/tests/lib/test_upgrades.py
@@ -537,8 +537,8 @@ def setUp(self):
         self.mapping_list = []
         self.mapping_list_name = []
         for values in upgrades.UPGRADES.values():
-            self.mapping_list.extend((x for x in values))
-            self.mapping_list_name.extend((x.__name__ for x in values))
+            self.mapping_list.extend(x for x in values)
+            self.mapping_list_name.extend(x.__name__ for x in values)

     def test_all_functions_used(self):
         self.assertEqual(len(self.mapping_list_name),