From dd23c0fd7126ba0872deb210a862e8542ea6722a Mon Sep 17 00:00:00 2001
From: Mayank Patibandla <34776435+mayankpatibandla@users.noreply.github.com>
Date: Fri, 2 Feb 2024 16:35:11 -0500
Subject: [PATCH] Normalize strings

---
 .pre-commit-config.yaml                       |   2 +-
 install_requires.py                           |   2 +-
 pros/cli/build.py                             |  30 +-
 pros/cli/click_classes.py                     |  30 +-
 pros/cli/common.py                            | 120 ++--
 pros/cli/compile_commands/intercept-cc.py     |   2 +-
 pros/cli/conductor.py                         | 298 ++++-----
 pros/cli/conductor_utils.py                   |  94 +--
 pros/cli/interactive.py                       |   2 +-
 pros/cli/main.py                              |  56 +-
 pros/cli/misc_commands.py                     |  18 +-
 pros/cli/terminal.py                          |  56 +-
 pros/cli/upload.py                            | 244 +++----
 pros/cli/user_script.py                       |   6 +-
 pros/cli/v5_utils.py                          | 184 +++---
 pros/common/sentry.py                         |  72 +-
 pros/common/ui/__init__.py                    |  60 +-
 pros/common/ui/interactive/ConfirmModal.py    |   4 +-
 pros/common/ui/interactive/application.py     |  28 +-
 .../ui/interactive/components/__init__.py     |  24 +-
 .../ui/interactive/components/button.py       |   2 +-
 .../ui/interactive/components/component.py    |   6 +-
 .../ui/interactive/components/container.py    |   6 +-
 .../common/ui/interactive/components/input.py |   2 +-
 .../common/ui/interactive/components/label.py |   2 +-
 pros/common/ui/interactive/observable.py      |   2 +-
 .../ui/interactive/parameters/__init__.py     |  12 +-
 .../interactive/parameters/misc_parameters.py |   8 +-
 .../ui/interactive/parameters/parameter.py    |   8 +-
 .../parameters/validatable_parameter.py       |   8 +-
 .../renderers/MachineOutputRenderer.py        |  38 +-
 .../ui/interactive/renderers/Renderer.py      |   2 +-
 pros/common/ui/log.py                         |  16 +-
 pros/common/utils.py                          |  24 +-
 pros/conductor/__init__.py                    |   2 +-
 pros/conductor/conductor.py                   | 238 +++----
 pros/conductor/depots/depot.py                |  12 +-
 pros/conductor/depots/http_depot.py           |  14 +-
 pros/conductor/depots/local_depot.py          |  18 +-
 pros/conductor/interactive/NewProjectModal.py |  30 +-
 .../interactive/UpdateProjectModal.py         |  24 +-
 pros/conductor/interactive/components.py      |  16 +-
 pros/conductor/interactive/parameters.py      |  46 +-
 pros/conductor/project/ProjectReport.py       |   6 +-
 pros/conductor/project/ProjectTransaction.py  |  52 +-
 pros/conductor/project/__init__.py            | 182 +++---
 pros/conductor/templates/base_template.py     |  48 +-
 pros/conductor/templates/external_template.py |   8 +-
 pros/conductor/templates/local_template.py    |   2 +-
 pros/conductor/transaction.py                 |  20 +-
 pros/config/cli_config.py                     |  18 +-
 pros/config/config.py                         |  42 +-
 pros/ga/analytics.py                          |  38 +-
 pros/serial/__init__.py                       |  10 +-
 pros/serial/devices/stream_device.py          |   2 +-
 pros/serial/devices/vex/cortex_device.py      |  58 +-
 pros/serial/devices/vex/message.py            |   2 +-
 pros/serial/devices/vex/stm32_device.py       |  70 +-
 pros/serial/devices/vex/v5_device.py          | 618 +++++++++---------
 pros/serial/devices/vex/v5_user_device.py     |  10 +-
 pros/serial/devices/vex/vex_device.py         |  22 +-
 pros/serial/interactive/UploadProjectModal.py |  76 +--
 pros/serial/interactive/__init__.py           |   2 +-
 pros/serial/ports/__init__.py                 |   2 +-
 pros/serial/ports/direct_port.py              |  10 +-
 pros/serial/ports/exceptions.py               |  12 +-
 pros/serial/ports/serial_share_bridge.py      |  58 +-
 pros/serial/ports/serial_share_port.py        |  32 +-
 pros/serial/terminal/terminal.py              |  46 +-
 pros/upgrade/__init__.py                      |   2 +-
 pros/upgrade/instructions/__init__.py         |   2 +-
 .../instructions/download_instructions.py     |  12 +-
 .../instructions/explorer_instructions.py     |   4 +-
 .../instructions/nothing_instructions.py      |   2 +-
 pros/upgrade/manifests/__init__.py            |   2 +-
 pros/upgrade/manifests/upgrade_manifest_v1.py |   8 +-
 pros/upgrade/manifests/upgrade_manifest_v2.py |  20 +-
 pros/upgrade/upgrade_manager.py               |  22 +-
 setup.py                                      |  16 +-
 version.py                                    |  38 +-
 80 files changed, 1721 insertions(+), 1721 deletions(-)

diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 049cc229..d38ebd27 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -23,7 +23,7 @@ repos:
     rev: 24.1.1
     hooks:
       - id: black
-        args: ["--skip-string-normalization", "--line-length=120"]
+        args: ["--line-length=120"]
   - repo: local
     hooks:
       - id: pylint
diff --git a/install_requires.py b/install_requires.py
index 6aad2a80..e77dd742 100644
--- a/install_requires.py
+++ b/install_requires.py
@@ -1,2 +1,2 @@
-with open('requirements.txt') as reqs:
+with open("requirements.txt") as reqs:
     install_requires = [req.strip() for req in reqs.readlines()]
diff --git a/pros/cli/build.py b/pros/cli/build.py
index 25f2098b..43178b5b 100644
--- a/pros/cli/build.py
+++ b/pros/cli/build.py
@@ -14,9 +14,9 @@ def build_cli():
     pass


-@build_cli.command(aliases=['build', 'm'])
+@build_cli.command(aliases=["build", "m"])
 @project_option()
-@click.argument('build-args', nargs=-1)
+@click.argument("build-args", nargs=-1)
 @default_options
 def make(project: c.Project, build_args):
     """
@@ -25,13 +25,13 @@ def make(project: c.Project, build_args):
     analytics.send("make")
     exit_code = project.compile(build_args)
     if exit_code != 0:
-        logger(__name__).error(f'Failed to make project: Exit Code {exit_code}', extra={'sentry': False})
-        raise click.ClickException('Failed to build')
+        logger(__name__).error(f"Failed to make project: Exit Code {exit_code}", extra={"sentry": False})
+        raise click.ClickException("Failed to build")
     return exit_code


-@build_cli.command('make-upload', aliases=['mu'], hidden=True)
-@click.option('build_args', '--make', '-m', multiple=True, help='Send arguments to make (e.g. compile target)')
+@build_cli.command("make-upload", aliases=["mu"], hidden=True)
+@click.option("build_args", "--make", "-m", multiple=True, help="Send arguments to make (e.g. compile target)")
 @shadow_command(upload)
 @project_option()
 @click.pass_context
@@ -41,8 +41,8 @@ def make_upload(ctx, project: c.Project, build_args: List[str], **upload_args):
     ctx.invoke(upload, project=project, **upload_args)


-@build_cli.command('make-upload-terminal', aliases=['mut'], hidden=True)
-@click.option('build_args', '--make', '-m', multiple=True, help='Send arguments to make (e.g. compile target)')
+@build_cli.command("make-upload-terminal", aliases=["mut"], hidden=True)
+@click.option("build_args", "--make", "-m", multiple=True, help="Send arguments to make (e.g.
compile target)") @shadow_command(upload) @project_option() @click.pass_context @@ -55,14 +55,14 @@ def make_upload_terminal(ctx, project: c.Project, build_args, **upload_args): ctx.invoke(terminal, port=project.target, request_banner=False) -@build_cli.command('build-compile-commands', hidden=True) +@build_cli.command("build-compile-commands", hidden=True) @project_option() @click.option( - '--suppress-output/--show-output', 'suppress_output', default=False, show_default=True, help='Suppress output' + "--suppress-output/--show-output", "suppress_output", default=False, show_default=True, help="Suppress output" ) -@click.option('--compile-commands', type=click.File('w'), default=None) -@click.option('--sandbox', default=False, is_flag=True) -@click.argument('build-args', nargs=-1) +@click.option("--compile-commands", type=click.File("w"), default=None) +@click.option("--sandbox", default=False, is_flag=True) +@click.argument("build-args", nargs=-1) @default_options def build_compile_commands( project: c.Project, suppress_output: bool, compile_commands, sandbox: bool, build_args: List[str] @@ -76,6 +76,6 @@ def build_compile_commands( build_args, cdb_file=compile_commands, suppress_output=suppress_output, sandbox=sandbox ) if exit_code != 0: - logger(__name__).error(f'Failed to make project: Exit Code {exit_code}', extra={'sentry': False}) - raise click.ClickException('Failed to build') + logger(__name__).error(f"Failed to make project: Exit Code {exit_code}", extra={"sentry": False}) + raise click.ClickException("Failed to build") return exit_code diff --git a/pros/cli/click_classes.py b/pros/cli/click_classes.py index 58aad9b5..6eeda9de 100644 --- a/pros/cli/click_classes.py +++ b/pros/cli/click_classes.py @@ -22,7 +22,7 @@ def format_commands(self, ctx, formatter): """Extra format methods for multi methods that adds all the commands after the options. """ - if not hasattr(self, 'list_commands'): + if not hasattr(self, "list_commands"): return rows = [] for subcommand in self.list_commands(ctx): @@ -30,14 +30,14 @@ def format_commands(self, ctx, formatter): # What is this, the tool lied about a command. 
Ignore it if cmd is None: continue - if hasattr(cmd, 'hidden') and cmd.hidden: + if hasattr(cmd, "hidden") and cmd.hidden: continue - help = cmd.short_help or '' + help = cmd.short_help or "" rows.append((subcommand, help)) if rows: - with formatter.section('Commands'): + with formatter.section("Commands"): formatter.write_dl(rows) def format_options(self, ctx, formatter): @@ -46,15 +46,15 @@ def format_options(self, ctx, formatter): for param in self.get_params(ctx): rv = param.get_help_record(ctx) if rv is not None: - if hasattr(param, 'group'): + if hasattr(param, "group"): opts[param.group].append(rv) else: - opts['Options'].append(rv) + opts["Options"].append(rv) - if len(opts['Options']) > 0: - with formatter.section('Options'): - formatter.write_dl(opts['Options']) - opts.pop('Options') + if len(opts["Options"]) > 0: + with formatter.section("Options"): + formatter.write_dl(opts["Options"]) + opts.pop("Options") for group, options in opts.items(): with formatter.section(group): @@ -79,16 +79,16 @@ def __init__(self, *args, hidden: bool = False, group: str = None, **kwargs): self.group = group def get_help_record(self, ctx): - if hasattr(self, 'hidden') and self.hidden: + if hasattr(self, "hidden") and self.hidden: return return super().get_help_record(ctx) class PROSDeprecated(click.Option): def __init__(self, *args, replacement: str = None, **kwargs): - kwargs['help'] = "This option has been deprecated." + kwargs["help"] = "This option has been deprecated." if not replacement == None: - kwargs['help'] += " Its replacement is '--{}'".format(replacement) + kwargs["help"] += " Its replacement is '--{}'".format(replacement) super(PROSDeprecated, self).__init__(*args, **kwargs) self.group = "Deprecated" self.optiontype = "flag" if str(self.type) == "BOOL" else "switch" @@ -116,7 +116,7 @@ def decorator(f): for alias in aliases: self.cmd_dict[alias] = f.__name__ if len(args) == 0 else args[0] - cmd = super(PROSGroup, self).command(*args, cls=kwargs.pop('cls', PROSCommand), **kwargs)(f) + cmd = super(PROSGroup, self).command(*args, cls=kwargs.pop("cls", PROSCommand), **kwargs)(f) self.add_command(cmd) return cmd @@ -128,7 +128,7 @@ def group(self, aliases=None, *args, **kwargs): def decorator(f): for alias in aliases: self.cmd_dict[alias] = f.__name__ - cmd = super(PROSGroup, self).group(*args, cls=kwargs.pop('cls', PROSGroup), **kwargs)(f) + cmd = super(PROSGroup, self).group(*args, cls=kwargs.pop("cls", PROSGroup), **kwargs)(f) self.add_command(cmd) return cmd diff --git a/pros/cli/common.py b/pros/cli/common.py index 30108507..8894ca97 100644 --- a/pros/cli/common.py +++ b/pros/cli/common.py @@ -16,23 +16,23 @@ def callback(ctx: click.Context, param: click.core.Parameter, value: Any): if isinstance(value, str): value = getattr(logging, value.upper(), None) if not isinstance(value, int): - raise ValueError('Invalid log level: {}'.format(value)) + raise ValueError("Invalid log level: {}".format(value)) if value: logger().setLevel(min(logger().level, logging.INFO)) - stdout_handler = ctx.obj['click_handler'] # type: logging.Handler + stdout_handler = ctx.obj["click_handler"] # type: logging.Handler stdout_handler.setLevel(logging.INFO) - logger(__name__).info('Verbose messages enabled') + logger(__name__).info("Verbose messages enabled") return value return click.option( - '--verbose', - help='Enable verbose output', + "--verbose", + help="Enable verbose output", is_flag=True, is_eager=True, expose_value=False, callback=callback, cls=PROSOption, - group='Standard Options', + 
group="Standard Options", )(f) @@ -44,25 +44,25 @@ def callback(ctx: click.Context, param: click.core.Parameter, value: Any): if isinstance(value, str): value = getattr(logging, value.upper(), None) if not isinstance(value, int): - raise ValueError('Invalid log level: {}'.format(value)) + raise ValueError("Invalid log level: {}".format(value)) if value: logging.getLogger().setLevel(logging.DEBUG) - stdout_handler = ctx.obj['click_handler'] # type: logging.Handler + stdout_handler = ctx.obj["click_handler"] # type: logging.Handler stdout_handler.setLevel(logging.DEBUG) - logging.getLogger(__name__).info('Debugging messages enabled') - if logger('pros').isEnabledFor(logging.DEBUG): - logger('pros').debug(f'CLI Version: {get_version()}') + logging.getLogger(__name__).info("Debugging messages enabled") + if logger("pros").isEnabledFor(logging.DEBUG): + logger("pros").debug(f"CLI Version: {get_version()}") return value return click.option( - '--debug', - help='Enable debugging output', + "--debug", + help="Enable debugging output", is_flag=True, is_eager=True, expose_value=False, callback=callback, cls=PROSOption, - group='Standard Options', + group="Standard Options", )(f) @@ -74,22 +74,22 @@ def callback(ctx: click.Context, param: click.core.Parameter, value: Any): if isinstance(value, str): value = getattr(logging, value.upper(), None) if not isinstance(value, int): - raise ValueError('Invalid log level: {}'.format(value)) + raise ValueError("Invalid log level: {}".format(value)) logging.getLogger().setLevel(min(logger().level, value)) - stdout_handler = ctx.obj['click_handler'] # type: logging.Handler + stdout_handler = ctx.obj["click_handler"] # type: logging.Handler stdout_handler.setLevel(value) return value return click.option( - '-l', - '--log', - help='Logging level', + "-l", + "--log", + help="Logging level", is_eager=True, expose_value=False, callback=callback, - type=click.Choice(['DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL']), + type=click.Choice(["DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"]), cls=PROSOption, - group='Standard Options', + group="Standard Options", )(f) @@ -102,26 +102,26 @@ def callback(ctx: click.Context, param: click.core.Parameter, value: Any): if isinstance(value[1], str): level = getattr(logging, value[1].upper(), None) if not isinstance(level, int): - raise ValueError('Invalid log level: {}'.format(value[1])) - handler = logging.FileHandler(value[0], mode='w') - fmt_str = '%(name)s.%(funcName)s:%(levelname)s - %(asctime)s - %(message)s' + raise ValueError("Invalid log level: {}".format(value[1])) + handler = logging.FileHandler(value[0], mode="w") + fmt_str = "%(name)s.%(funcName)s:%(levelname)s - %(asctime)s - %(message)s" handler.setFormatter(logging.Formatter(fmt_str)) handler.setLevel(level) logging.getLogger().addHandler(handler) - stdout_handler = ctx.obj['click_handler'] # type: logging.Handler + stdout_handler = ctx.obj["click_handler"] # type: logging.Handler stdout_handler.setLevel(logging.getLogger().level) # pin stdout_handler to its current log level logging.getLogger().setLevel(min(logging.getLogger().level, level)) return click.option( - '--logfile', - help='Log messages to a file', + "--logfile", + help="Log messages to a file", is_eager=True, expose_value=False, callback=callback, default=(None, None), - type=click.Tuple([click.Path(), click.Choice(['DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL'])]), + type=click.Tuple([click.Path(), click.Choice(["DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"])]), cls=PROSOption, - group='Standard 
Options', + group="Standard Options", )(f) @@ -132,22 +132,22 @@ def machine_output_option(f: Union[click.Command, Callable]): def callback(ctx: click.Context, param: click.Parameter, value: str): ctx.ensure_object(dict) - add_tag('machine-output', value) # goes in sentry report + add_tag("machine-output", value) # goes in sentry report if value: ctx.obj[param.name] = value logging.getLogger().setLevel(logging.DEBUG) - stdout_handler = ctx.obj['click_handler'] # type: logging.Handler + stdout_handler = ctx.obj["click_handler"] # type: logging.Handler stdout_handler.setLevel(logging.DEBUG) - logging.getLogger(__name__).info('Debugging messages enabled') + logging.getLogger(__name__).info("Debugging messages enabled") return value decorator = click.option( - '--machine-output', + "--machine-output", expose_value=False, is_flag=True, default=False, is_eager=True, - help='Enable machine friendly output.', + help="Enable machine friendly output.", callback=callback, cls=PROSOption, hidden=True, @@ -163,12 +163,12 @@ def no_sentry_option(f: Union[click.Command, Callable]): def callback(ctx: click.Context, param: click.Parameter, value: bool): ctx.ensure_object(dict) - add_tag('no-sentry', value) + add_tag("no-sentry", value) if value: pros.common.sentry.disable_prompt() decorator = click.option( - '--no-sentry', + "--no-sentry", expose_value=False, is_flag=True, default=False, @@ -189,14 +189,14 @@ def no_analytics(f: Union[click.Command, Callable]): def callback(ctx: click.Context, param: click.Parameter, value: bool): ctx.ensure_object(dict) - add_tag('no-analytics', value) + add_tag("no-analytics", value) if value: echo("Not sending analytics for this command.\n") analytics.useAnalytics = False pass decorator = click.option( - '--no-analytics', + "--no-analytics", expose_value=False, is_flag=True, default=False, @@ -221,7 +221,7 @@ def default_options(f: Union[click.Command, Callable]): return decorator -def template_query(arg_name='query', required: bool = False): +def template_query(arg_name="query", required: bool = False): """ provides a wrapper for conductor commands which require an optional query @@ -234,10 +234,10 @@ def callback(ctx: click.Context, param: click.Parameter, value: Tuple[str, ...]) value = list(value) spec = None - if len(value) > 0 and not value[0].startswith('--'): + if len(value) > 0 and not value[0].startswith("--"): spec = value.pop(0) if not spec and required: - raise ValueError(f'A {arg_name} is required to perform this command') + raise ValueError(f"A {arg_name} is required to perform this command") query = c.BaseTemplate.create_query( spec, **{value[i][2:]: value[i + 1] for i in range(0, int(len(value) / 2) * 2, 2)} ) @@ -250,7 +250,7 @@ def wrapper(f: Union[click.Command, Callable]): return wrapper -def project_option(arg_name='project', required: bool = True, default: str = '.', allow_none: bool = False): +def project_option(arg_name="project", required: bool = True, default: str = ".", allow_none: bool = False): def callback(ctx: click.Context, param: click.Parameter, value: str): if allow_none and value is None: return None @@ -263,20 +263,20 @@ def callback(ctx: click.Context, param: click.Parameter, value: str): else: raise click.UsageError( f'{os.path.abspath(value or ".")} is not inside a PROS project. 
' - f'Execute this command from within a PROS project or specify it ' - f'with --project project/path' + f"Execute this command from within a PROS project or specify it " + f"with --project project/path" ) return c.Project(project_path) def wrapper(f: Union[click.Command, Callable]): return click.option( - f'--{arg_name}', + f"--{arg_name}", callback=callback, required=required, default=default, type=click.Path(exists=True), show_default=True, - help='PROS Project directory or file', + help="PROS Project directory or file", )(f) return wrapper @@ -287,7 +287,7 @@ def wrapper(f: Union[click.Command, Callable]): if isinstance(f, click.Command): f.params.extend(p for p in command.params if p.name not in [p.name for p in command.params]) else: - if not hasattr(f, '__click_params__'): + if not hasattr(f, "__click_params__"): f.__click_params__ = [] f.__click_params__.extend(p for p in command.params if p.name not in [p.name for p in f.__click_params__]) return f @@ -322,20 +322,20 @@ def resolve_v5_port(port: Optional[str], type: str, quiet: bool = False) -> Tupl is_joystick = False if not port: ports = find_v5_ports(type) - logger(__name__).debug('Ports: {}'.format(';'.join([str(p.__dict__) for p in ports]))) + logger(__name__).debug("Ports: {}".format(";".join([str(p.__dict__) for p in ports]))) if len(ports) == 0: if not quiet: logger(__name__).error( - 'No {0} ports were found! If you think you have a {0} plugged in, ' - 'run this command again with the --debug flag'.format('v5'), - extra={'sentry': False}, + "No {0} ports were found! If you think you have a {0} plugged in, " + "run this command again with the --debug flag".format("v5"), + extra={"sentry": False}, ) return None, False if len(ports) > 1: if not quiet: port = click.prompt( - 'Multiple {} ports were found. Please choose one: [{}]'.format( - 'v5', '|'.join([p.device for p in ports]) + "Multiple {} ports were found. Please choose one: [{}]".format( + "v5", "|".join([p.device for p in ports]) ), default=ports[0].device, show_default=False, @@ -346,8 +346,8 @@ def resolve_v5_port(port: Optional[str], type: str, quiet: bool = False) -> Tupl return None, False else: port = ports[0].device - is_joystick = type == 'user' and 'Controller' in ports[0].description - logger(__name__).info('Automatically selected {}'.format(port)) + is_joystick = type == "user" and "Controller" in ports[0].description + logger(__name__).info("Automatically selected {}".format(port)) return port, is_joystick @@ -359,15 +359,15 @@ def resolve_cortex_port(port: Optional[str], quiet: bool = False) -> Optional[st if len(ports) == 0: if not quiet: logger(__name__).error( - 'No {0} ports were found! If you think you have a {0} plugged in, ' - 'run this command again with the --debug flag'.format('cortex'), - extra={'sentry': False}, + "No {0} ports were found! If you think you have a {0} plugged in, " + "run this command again with the --debug flag".format("cortex"), + extra={"sentry": False}, ) return None if len(ports) > 1: if not quiet: port = click.prompt( - 'Multiple {} ports were found. Please choose one: '.format('cortex'), + "Multiple {} ports were found. 
Please choose one: ".format("cortex"), default=ports[0].device, type=click.Choice([p.device for p in ports]), ) @@ -376,5 +376,5 @@ def resolve_cortex_port(port: Optional[str], quiet: bool = False) -> Optional[st return None else: port = ports[0].device - logger(__name__).info('Automatically selected {}'.format(port)) + logger(__name__).info("Automatically selected {}".format(port)) return port diff --git a/pros/cli/compile_commands/intercept-cc.py b/pros/cli/compile_commands/intercept-cc.py index 66026e54..7c50b48d 100644 --- a/pros/cli/compile_commands/intercept-cc.py +++ b/pros/cli/compile_commands/intercept-cc.py @@ -1,4 +1,4 @@ from libscanbuild.intercept import intercept_compiler_wrapper -if __name__ == '__main__': +if __name__ == "__main__": intercept_compiler_wrapper() diff --git a/pros/cli/conductor.py b/pros/cli/conductor.py index 8a8f32e7..e8ca8e6c 100644 --- a/pros/cli/conductor.py +++ b/pros/cli/conductor.py @@ -13,7 +13,7 @@ def conductor_cli(): pass -@conductor_cli.group(cls=PROSGroup, aliases=['cond', 'c', 'conduct'], short_help='Perform project management for PROS') +@conductor_cli.group(cls=PROSGroup, aliases=["cond", "c", "conduct"], short_help="Perform project management for PROS") @default_options def conductor(): """ @@ -26,9 +26,9 @@ def conductor(): @conductor.command( - aliases=['download'], - short_help='Fetch/Download a remote template', - context_settings={'ignore_unknown_options': True}, + aliases=["download"], + short_help="Fetch/Download a remote template", + context_settings={"ignore_unknown_options": True}, ) @template_query(required=True) @default_options @@ -51,80 +51,80 @@ def fetch(query: c.BaseTemplate): template_file = query.identifier elif os.path.exists(query.name) and query.version is None: template_file = query.name - elif query.metadata.get('origin', None) == 'local': - if 'location' not in query.metadata: - logger(__name__).error('--location option is required for the local depot. Specify --location ') - logger(__name__).debug(f'Query options provided: {query.metadata}') + elif query.metadata.get("origin", None) == "local": + if "location" not in query.metadata: + logger(__name__).error("--location option is required for the local depot. 
Specify --location ") + logger(__name__).debug(f"Query options provided: {query.metadata}") return -1 - template_file = query.metadata['location'] + template_file = query.metadata["location"] if template_file and ( - os.path.splitext(template_file)[1] in ['.zip'] or os.path.exists(os.path.join(template_file, 'template.pros')) + os.path.splitext(template_file)[1] in [".zip"] or os.path.exists(os.path.join(template_file, "template.pros")) ): template = ExternalTemplate(template_file) - query.metadata['location'] = template_file + query.metadata["location"] = template_file depot = c.LocalDepot() - logger(__name__).debug(f'Template file found: {template_file}') + logger(__name__).debug(f"Template file found: {template_file}") else: if template_file: - logger(__name__).debug(f'Template file exists but is not a valid template: {template_file}') + logger(__name__).debug(f"Template file exists but is not a valid template: {template_file}") template = c.Conductor().resolve_template(query, allow_offline=False) - logger(__name__).debug(f'Template from resolved query: {template}') + logger(__name__).debug(f"Template from resolved query: {template}") if template is None: - logger(__name__).error(f'There are no templates matching {query}!') + logger(__name__).error(f"There are no templates matching {query}!") return -1 - depot = c.Conductor().get_depot(template.metadata['origin']) - logger(__name__).debug(f'Found depot: {depot}') + depot = c.Conductor().get_depot(template.metadata["origin"]) + logger(__name__).debug(f"Found depot: {depot}") # query.metadata contain all of the extra args that also go to the depot. There's no way for us to determine # whether the arguments are for the template or for the depot, so they share them - logger(__name__).debug(f'Additional depot and template args: {query.metadata}') + logger(__name__).debug(f"Additional depot and template args: {query.metadata}") c.Conductor().fetch_template(depot, template, **query.metadata) -@conductor.command(context_settings={'ignore_unknown_options': True}) -@click.option('--upgrade/--no-upgrade', 'upgrade_ok', default=True, help='Allow upgrading templates in a project') -@click.option('--install/--no-install', 'install_ok', default=True, help='Allow installing templates in a project') +@conductor.command(context_settings={"ignore_unknown_options": True}) +@click.option("--upgrade/--no-upgrade", "upgrade_ok", default=True, help="Allow upgrading templates in a project") +@click.option("--install/--no-install", "install_ok", default=True, help="Allow installing templates in a project") @click.option( - '--download/--no-download', - 'download_ok', + "--download/--no-download", + "download_ok", default=True, - help='Allow downloading templates or only allow local templates', + help="Allow downloading templates or only allow local templates", ) @click.option( - '--upgrade-user-files/--no-upgrade-user-files', - 'force_user', + "--upgrade-user-files/--no-upgrade-user-files", + "force_user", default=False, - help='Replace all user files in a template', + help="Replace all user files in a template", ) @click.option( - '--force', - 'force_system', + "--force", + "force_system", default=False, is_flag=True, help="Force all system files to be inserted into the project", ) @click.option( - '--force-apply', - 'force_apply', + "--force-apply", + "force_apply", default=False, is_flag=True, help="Force apply the template, disregarding if the template is already installed.", ) @click.option( - '--remove-empty-dirs/--no-remove-empty-dirs', - 
'remove_empty_directories', + "--remove-empty-dirs/--no-remove-empty-dirs", + "remove_empty_directories", is_flag=True, default=True, - help='Remove empty directories when removing files', + help="Remove empty directories when removing files", ) @click.option( - '--early-access/--disable-early-access', - '--early/--disable-early', - '-ea/-dea', - 'early_access', - '--beta/--disable-beta', + "--early-access/--disable-early-access", + "--early/--disable-early", + "-ea/-dea", + "early_access", + "--beta/--disable-beta", default=None, - help='Create a project using the PROS 4 kernel', + help="Create a project using the PROS 4 kernel", ) @project_option() @template_query(required=True) @@ -139,31 +139,31 @@ def apply(project: c.Project, query: c.BaseTemplate, **kwargs): return c.Conductor().apply_template(project, identifier=query, **kwargs) -@conductor.command(aliases=['i', 'in'], context_settings={'ignore_unknown_options': True}) -@click.option('--upgrade/--no-upgrade', 'upgrade_ok', default=False) -@click.option('--download/--no-download', 'download_ok', default=True) -@click.option('--force-user', 'force_user', default=False, is_flag=True, help='Replace all user files in a template') +@conductor.command(aliases=["i", "in"], context_settings={"ignore_unknown_options": True}) +@click.option("--upgrade/--no-upgrade", "upgrade_ok", default=False) +@click.option("--download/--no-download", "download_ok", default=True) +@click.option("--force-user", "force_user", default=False, is_flag=True, help="Replace all user files in a template") @click.option( - '--force-system', - '-f', - 'force_system', + "--force-system", + "-f", + "force_system", default=False, is_flag=True, help="Force all system files to be inserted into the project", ) @click.option( - '--force-apply', - 'force_apply', + "--force-apply", + "force_apply", default=False, is_flag=True, help="Force apply the template, disregarding if the template is already installed.", ) @click.option( - '--remove-empty-dirs/--no-remove-empty-dirs', - 'remove_empty_directories', + "--remove-empty-dirs/--no-remove-empty-dirs", + "remove_empty_directories", is_flag=True, default=True, - help='Remove empty directories when removing files', + help="Remove empty directories when removing files", ) @project_option() @template_query(required=True) @@ -179,40 +179,40 @@ def install(ctx: click.Context, **kwargs): return ctx.invoke(apply, install_ok=True, **kwargs) -@conductor.command(context_settings={'ignore_unknown_options': True}, aliases=['u']) -@click.option('--install/--no-install', 'install_ok', default=False) -@click.option('--download/--no-download', 'download_ok', default=True) -@click.option('--force-user', 'force_user', default=False, is_flag=True, help='Replace all user files in a template') +@conductor.command(context_settings={"ignore_unknown_options": True}, aliases=["u"]) +@click.option("--install/--no-install", "install_ok", default=False) +@click.option("--download/--no-download", "download_ok", default=True) +@click.option("--force-user", "force_user", default=False, is_flag=True, help="Replace all user files in a template") @click.option( - '--force-system', - '-f', - 'force_system', + "--force-system", + "-f", + "force_system", default=False, is_flag=True, help="Force all system files to be inserted into the project", ) @click.option( - '--force-apply', - 'force_apply', + "--force-apply", + "force_apply", default=False, is_flag=True, help="Force apply the template, disregarding if the template is already installed.", ) @click.option( - 
'--remove-empty-dirs/--no-remove-empty-dirs', - 'remove_empty_directories', + "--remove-empty-dirs/--no-remove-empty-dirs", + "remove_empty_directories", is_flag=True, default=True, - help='Remove empty directories when removing files', + help="Remove empty directories when removing files", ) @click.option( - '--early-access/--disable-early-access', - '--early/--disable-early', - '-ea/-dea', - 'early_access', - '--beta/--disable-beta', + "--early-access/--disable-early-access", + "--early/--disable-early", + "-ea/-dea", + "early_access", + "--beta/--disable-beta", default=None, - help='Create a project using the PROS 4 kernel', + help="Create a project using the PROS 4 kernel", ) @project_option() @template_query(required=False) @@ -227,25 +227,25 @@ def upgrade(ctx: click.Context, project: c.Project, query: c.BaseTemplate, **kwa analytics.send("upgrade-project") if not query.name: for template in project.templates.keys(): - click.secho(f'Upgrading {template}', color='yellow') + click.secho(f"Upgrading {template}", color="yellow") q = c.BaseTemplate.create_query( - name=template, target=project.target, supported_kernels=project.templates['kernel'].version + name=template, target=project.target, supported_kernels=project.templates["kernel"].version ) ctx.invoke(apply, upgrade_ok=True, project=project, query=q, **kwargs) else: ctx.invoke(apply, project=project, query=query, upgrade_ok=True, **kwargs) -@conductor.command('uninstall') -@click.option('--remove-user', is_flag=True, default=False, help='Also remove user files') +@conductor.command("uninstall") +@click.option("--remove-user", is_flag=True, default=False, help="Also remove user files") @click.option( - '--remove-empty-dirs/--no-remove-empty-dirs', - 'remove_empty_directories', + "--remove-empty-dirs/--no-remove-empty-dirs", + "remove_empty_directories", is_flag=True, default=True, - help='Remove empty directories when removing files', + help="Remove empty directories when removing files", ) -@click.option('--no-make-clean', is_flag=True, default=True, help='Do not run make clean after removing') +@click.option("--no-make-clean", is_flag=True, default=True, help="Do not run make clean after removing") @project_option() @template_query() @default_options @@ -270,51 +270,51 @@ def uninstall_template( project.compile(["clean"]) -@conductor.command('new-project', aliases=['new', 'create-project']) -@click.argument('path', type=click.Path()) -@click.argument('target', default=c.Conductor().default_target, type=click.Choice(['v5', 'cortex'])) -@click.argument('version', default='latest') -@click.option('--force-user', 'force_user', default=False, is_flag=True, help='Replace all user files in a template') +@conductor.command("new-project", aliases=["new", "create-project"]) +@click.argument("path", type=click.Path()) +@click.argument("target", default=c.Conductor().default_target, type=click.Choice(["v5", "cortex"])) +@click.argument("version", default="latest") +@click.option("--force-user", "force_user", default=False, is_flag=True, help="Replace all user files in a template") @click.option( - '--force-system', - '-f', - 'force_system', + "--force-system", + "-f", + "force_system", default=False, is_flag=True, help="Force all system files to be inserted into the project", ) @click.option( - '--force-refresh', + "--force-refresh", is_flag=True, default=False, show_default=True, - help='Force update all remote depots, ignoring automatic update checks', + help="Force update all remote depots, ignoring automatic update checks", ) 
@click.option( - '--no-default-libs', - 'no_default_libs', + "--no-default-libs", + "no_default_libs", default=False, is_flag=True, - help='Do not install any default libraries after creating the project.', + help="Do not install any default libraries after creating the project.", ) @click.option( - '--compile-after', is_flag=True, default=True, show_default=True, help='Compile the project after creation' + "--compile-after", is_flag=True, default=True, show_default=True, help="Compile the project after creation" ) @click.option( - '--build-cache', + "--build-cache", is_flag=True, default=None, show_default=False, - help='Build compile commands cache after creation. Overrides --compile-after if both are specified.', + help="Build compile commands cache after creation. Overrides --compile-after if both are specified.", ) @click.option( - '--early-access/--disable-early-access', - '--early/--disable-early', - '-ea/-dea', - 'early_access', - '--beta/--disable-beta', + "--early-access/--disable-early-access", + "--early/--disable-early", + "-ea/-dea", + "early_access", + "--beta/--disable-beta", default=None, - help='Create a project using the PROS 4 kernel', + help="Create a project using the PROS 4 kernel", ) @click.pass_context @default_options @@ -336,15 +336,15 @@ def new_project( Visit https://pros.cs.purdue.edu/v5/cli/conductor.html to learn more """ analytics.send("new-project") - version_source = version.lower() == 'latest' - if version.lower() == 'latest' or not version: - version = '>0' + version_source = version.lower() == "latest" + if version.lower() == "latest" or not version: + version = ">0" if not force_system and c.Project.find_project(path) is not None: logger(__name__).error( - 'A project already exists in this location at ' + "A project already exists in this location at " + c.Project.find_project(path) - + '! Delete it first. Are you creating a project in an existing one?', - extra={'sentry': False}, + + "! Delete it first. 
Are you creating a project in an existing one?", + extra={"sentry": False}, ) ctx.exit(-1) try: @@ -361,16 +361,16 @@ def new_project( no_default_libs=no_default_libs, **kwargs, ) - ui.echo('New PROS Project was created:', output_machine=False) + ui.echo("New PROS Project was created:", output_machine=False) ctx.invoke(info_project, project=project) if compile_after or build_cache: with ui.Notification(): - ui.echo('Building project...') + ui.echo("Building project...") exit_code = project.compile([], scan_build=build_cache) if exit_code != 0: - logger(__name__).error(f'Failed to make project: Exit Code {exit_code}', extra={'sentry': False}) - raise click.ClickException('Failed to build') + logger(__name__).error(f"Failed to make project: Exit Code {exit_code}", extra={"sentry": False}) + raise click.ClickException("Failed to build") except Exception as e: pros.common.logger(__name__).exception(e) @@ -378,40 +378,40 @@ def new_project( @conductor.command( - 'query-templates', - aliases=['search-templates', 'ls-templates', 'lstemplates', 'querytemplates', 'searchtemplates'], - context_settings={'ignore_unknown_options': True}, + "query-templates", + aliases=["search-templates", "ls-templates", "lstemplates", "querytemplates", "searchtemplates"], + context_settings={"ignore_unknown_options": True}, ) @click.option( - '--allow-offline/--no-offline', - 'allow_offline', + "--allow-offline/--no-offline", + "allow_offline", default=True, show_default=True, - help='(Dis)allow offline templates in the listing', + help="(Dis)allow offline templates in the listing", ) @click.option( - '--allow-online/--no-online', - 'allow_online', + "--allow-online/--no-online", + "allow_online", default=True, show_default=True, - help='(Dis)allow online templates in the listing', + help="(Dis)allow online templates in the listing", ) @click.option( - '--force-refresh', + "--force-refresh", is_flag=True, default=False, show_default=True, - help='Force update all remote depots, ignoring automatic update checks', + help="Force update all remote depots, ignoring automatic update checks", ) -@click.option('--limit', type=int, default=15, help='The maximum number of displayed results for each library') +@click.option("--limit", type=int, default=15, help="The maximum number of displayed results for each library") @click.option( - '--early-access/--disable-early-access', - '--early/--disable-early', - '-ea/-dea', - 'early_access', - '--beta/--disable-beta', + "--early-access/--disable-early-access", + "--early/--disable-early", + "-ea/-dea", + "early_access", + "--beta/--disable-beta", default=None, - help='View a list of early access templates', + help="View a list of early access templates", ) @template_query(required=False) @click.pass_context @@ -454,30 +454,30 @@ def query_templates( key = (template.identifier, template.origin) if key in render_templates: if isinstance(template, c.LocalTemplate): - render_templates[key]['local'] = True + render_templates[key]["local"] = True else: render_templates[key] = { - 'name': template.name, - 'version': template.version, - 'location': template.origin, - 'target': template.target, - 'local': isinstance(template, c.LocalTemplate), + "name": template.name, + "version": template.version, + "location": template.origin, + "target": template.target, + "local": isinstance(template, c.LocalTemplate), } import semantic_version as semver render_templates = sorted( - render_templates.values(), key=lambda k: (k['name'], semver.Version(k['version']), k['local']), reverse=True + 
render_templates.values(), key=lambda k: (k["name"], semver.Version(k["version"]), k["local"]), reverse=True ) # Impose the output limit for each library's templates output_templates = [] - for _, g in groupby(render_templates, key=lambda t: t['name'] + t['target']): + for _, g in groupby(render_templates, key=lambda t: t["name"] + t["target"]): output_templates += list(g)[:limit] - ui.finalize('template-query', output_templates) + ui.finalize("template-query", output_templates) -@conductor.command('info-project') -@click.option('--ls-upgrades/--no-ls-upgrades', 'ls_upgrades', default=False) +@conductor.command("info-project") +@click.option("--ls-upgrades/--no-ls-upgrades", "ls_upgrades", default=False) @project_option() @default_options def info_project(project: c.Project, ls_upgrades): @@ -492,7 +492,7 @@ def info_project(project: c.Project, ls_upgrades): report = ProjectReport(project) _conductor = c.Conductor() if ls_upgrades: - for template in report.project['templates']: + for template in report.project["templates"]: import semantic_version as semver templates = _conductor.resolve_templates( @@ -502,12 +502,12 @@ def info_project(project: c.Project, ls_upgrades): ) template["upgrades"] = sorted({t.version for t in templates}, key=lambda v: semver.Version(v), reverse=True) - ui.finalize('project-report', report) + ui.finalize("project-report", report) -@conductor.command('add-depot') -@click.argument('name') -@click.argument('url') +@conductor.command("add-depot") +@click.argument("name") +@click.argument("url") @default_options def add_depot(name: str, url: str): """ @@ -521,8 +521,8 @@ def add_depot(name: str, url: str): ui.echo(f"Added depot {name} from {url}") -@conductor.command('remove-depot') -@click.argument('name') +@conductor.command("remove-depot") +@click.argument("name") @default_options def remove_depot(name: str): """ @@ -536,8 +536,8 @@ def remove_depot(name: str): ui.echo(f"Removed depot {name}") -@conductor.command('query-depots') -@click.option('--url', is_flag=True) +@conductor.command("query-depots") +@click.option("--url", is_flag=True) @default_options def query_depots(url: bool): """ @@ -547,4 +547,4 @@ def query_depots(url: bool): """ _conductor = c.Conductor() ui.echo(f"Available Depots{' (Add --url for the url)' if not url else ''}:\n") - ui.echo('\n'.join(_conductor.query_depots(url)) + "\n") + ui.echo("\n".join(_conductor.query_depots(url)) + "\n") diff --git a/pros/cli/conductor_utils.py b/pros/cli/conductor_utils.py index 7e148ed7..4f306030 100644 --- a/pros/cli/conductor_utils.py +++ b/pros/cli/conductor_utils.py @@ -17,28 +17,28 @@ from .conductor import conductor -@conductor.command('create-template', context_settings={'allow_extra_args': True, 'ignore_unknown_options': True}) -@click.argument('path', type=click.Path(exists=True)) -@click.argument('name') -@click.argument('version') +@conductor.command("create-template", context_settings={"allow_extra_args": True, "ignore_unknown_options": True}) +@click.argument("path", type=click.Path(exists=True)) +@click.argument("name") +@click.argument("version") @click.option( - '--system', 'system_files', multiple=True, type=click.Path(), help='Specify "system" files required by the template' + "--system", "system_files", multiple=True, type=click.Path(), help='Specify "system" files required by the template' ) @click.option( - '--user', - 'user_files', + "--user", + "user_files", multiple=True, type=click.Path(), - help='Specify files that are intended to be modified by users', + help="Specify files that are 
intended to be modified by users", ) -@click.option('--kernels', 'supported_kernels', help='Specify supported kernels') -@click.option('--target', type=click.Choice(['v5', 'cortex']), help='Specify the target platform (cortex or v5)') +@click.option("--kernels", "supported_kernels", help="Specify supported kernels") +@click.option("--target", type=click.Choice(["v5", "cortex"]), help="Specify the target platform (cortex or v5)") @click.option( - '--destination', + "--destination", type=click.Path(), - help='Specify an alternate destination for the created ZIP file or template descriptor', + help="Specify an alternate destination for the created ZIP file or template descriptor", ) -@click.option('--zip/--no-zip', 'do_zip', default=True, help='Create a ZIP file or create a template descriptor.') +@click.option("--zip/--no-zip", "do_zip", default=True, help="Create a ZIP file or create a template descriptor.") @default_options @click.pass_context def create_template(ctx, path: str, destination: str, do_zip: bool, **kwargs): @@ -71,114 +71,114 @@ def create_template(ctx, path: str, destination: str, do_zip: bool, **kwargs): if project: project = c.Project(project) path = project.location - if not kwargs['supported_kernels'] and kwargs['name'] != 'kernel': - kwargs['supported_kernels'] = f'^{project.kernel}' - kwargs['target'] = project.target + if not kwargs["supported_kernels"] and kwargs["name"] != "kernel": + kwargs["supported_kernels"] = f"^{project.kernel}" + kwargs["target"] = project.target if not destination: if os.path.isdir(path): destination = path else: destination = os.path.dirname(path) - kwargs['system_files'] = list(kwargs['system_files']) - kwargs['user_files'] = list(kwargs['user_files']) - kwargs['metadata'] = {ctx.args[i][2:]: ctx.args[i + 1] for i in range(0, int(len(ctx.args) / 2) * 2, 2)} + kwargs["system_files"] = list(kwargs["system_files"]) + kwargs["user_files"] = list(kwargs["user_files"]) + kwargs["metadata"] = {ctx.args[i][2:]: ctx.args[i + 1] for i in range(0, int(len(ctx.args) / 2) * 2, 2)} def get_matching_files(globs: List[str]) -> Set[str]: matching_files: List[str] = [] _path = os.path.normpath(path) + os.path.sep for g in [g for g in globs if glob.has_magic(g)]: - files = glob.glob(f'{path}/{g}', recursive=True) + files = glob.glob(f"{path}/{g}", recursive=True) files = filter(lambda f: os.path.isfile(f), files) files = [os.path.normpath(os.path.normpath(f).split(_path)[-1]) for f in files] matching_files.extend(files) # matches things like src/opcontrol.{c,cpp} so that we can expand to src/opcontrol.c and src/opcontrol.cpp - pattern = re.compile(r'^([\w{}]+.){{((?:\w+,)*\w+)}}$'.format(os.path.sep.replace('\\', '\\\\'))) + pattern = re.compile(r"^([\w{}]+.){{((?:\w+,)*\w+)}}$".format(os.path.sep.replace("\\", "\\\\"))) for f in [os.path.normpath(f) for f in globs if not glob.has_magic(f)]: if re.match(pattern, f): matches = re.split(pattern, f) - logger(__name__).debug(f'Matches on {f}: {matches}') - matching_files.extend([f'{matches[1]}{ext}' for ext in matches[2].split(',')]) + logger(__name__).debug(f"Matches on {f}: {matches}") + matching_files.extend([f"{matches[1]}{ext}" for ext in matches[2].split(",")]) else: matching_files.append(f) matching_files: Set[str] = set(matching_files) return matching_files - matching_system_files: Set[str] = get_matching_files(kwargs['system_files']) - matching_user_files: Set[str] = get_matching_files(kwargs['user_files']) + matching_system_files: Set[str] = get_matching_files(kwargs["system_files"]) + 
matching_user_files: Set[str] = get_matching_files(kwargs["user_files"]) matching_system_files: Set[str] = matching_system_files - matching_user_files # exclude existing project.pros and template.pros from the template, # and name@*.zip so that we don't redundantly include ZIPs - exclude_files = {'project.pros', 'template.pros', *get_matching_files([f"{kwargs['name']}@*.zip"])} + exclude_files = {"project.pros", "template.pros", *get_matching_files([f"{kwargs['name']}@*.zip"])} if project: exclude_files = exclude_files.union(project.list_template_files()) matching_system_files = matching_system_files - exclude_files matching_user_files = matching_user_files - exclude_files def filename_remap(file_path: str) -> str: - if os.path.dirname(file_path) == 'bin': - return file_path.replace('bin', 'firmware', 1) + if os.path.dirname(file_path) == "bin": + return file_path.replace("bin", "firmware", 1) return file_path - kwargs['system_files'] = list(map(filename_remap, matching_system_files)) - kwargs['user_files'] = list(map(filename_remap, matching_user_files)) + kwargs["system_files"] = list(map(filename_remap, matching_system_files)) + kwargs["user_files"] = list(map(filename_remap, matching_user_files)) if do_zip: - if not os.path.isdir(destination) and os.path.splitext(destination)[-1] != '.zip': - logger(__name__).error(f'{destination} must be a zip file or an existing directory.') + if not os.path.isdir(destination) and os.path.splitext(destination)[-1] != ".zip": + logger(__name__).error(f"{destination} must be a zip file or an existing directory.") return -1 with tempfile.TemporaryDirectory() as td: - template = ExternalTemplate(file=os.path.join(td, 'template.pros'), **kwargs) + template = ExternalTemplate(file=os.path.join(td, "template.pros"), **kwargs) template.save() if os.path.isdir(destination): - destination = os.path.join(destination, f'{template.identifier}.zip') - with zipfile.ZipFile(destination, mode='w') as z: - z.write(template.save_file, arcname='template.pros') + destination = os.path.join(destination, f"{template.identifier}.zip") + with zipfile.ZipFile(destination, mode="w") as z: + z.write(template.save_file, arcname="template.pros") for file in matching_user_files: source_path = os.path.join(path, file) dest_file = filename_remap(file) if os.path.exists(source_path): - ui.echo(f'U: {file}' + (f' -> {dest_file}' if file != dest_file else '')) - z.write(f'{path}/{file}', arcname=dest_file) + ui.echo(f"U: {file}" + (f" -> {dest_file}" if file != dest_file else "")) + z.write(f"{path}/{file}", arcname=dest_file) for file in matching_system_files: source_path = os.path.join(path, file) dest_file = filename_remap(file) if os.path.exists(source_path): - ui.echo(f'S: {file}' + (f' -> {dest_file}' if file != dest_file else '')) - z.write(f'{path}/{file}', arcname=dest_file) + ui.echo(f"S: {file}" + (f" -> {dest_file}" if file != dest_file else "")) + z.write(f"{path}/{file}", arcname=dest_file) else: if os.path.isdir(destination): - destination = os.path.join(destination, 'template.pros') + destination = os.path.join(destination, "template.pros") template = ExternalTemplate(file=destination, **kwargs) template.save() @conductor.command( - 'purge-template', help='Purge template(s) from the local cache', context_settings={'ignore_unknown_options': True} + "purge-template", help="Purge template(s) from the local cache", context_settings={"ignore_unknown_options": True} ) -@click.option('-f', '--force', is_flag=True, default=False, help='Do not prompt for removal of multiple 
templates') +@click.option("-f", "--force", is_flag=True, default=False, help="Do not prompt for removal of multiple templates") @template_query(required=False) @default_options def purge_template(query: c.BaseTemplate, force): analytics.send("purge-template") if not query: force = click.confirm( - 'Are you sure you want to remove all cached templates? This action is non-reversable!', abort=True + "Are you sure you want to remove all cached templates? This action is non-reversable!", abort=True ) cond = c.Conductor() templates = cond.resolve_templates(query, allow_online=False) beta_templates = cond.resolve_templates(query, allow_online=False, beta=True) if len(templates) == 0: - click.echo('No matching templates were found matching the spec.') + click.echo("No matching templates were found matching the spec.") return 0 t_list = [t.identifier for t in templates] + [t.identifier for t in beta_templates] - click.echo(f'The following template(s) will be removed {t_list}') + click.echo(f"The following template(s) will be removed {t_list}") if len(templates) > 1 and not force: - click.confirm(f'Are you sure you want to remove multiple templates?', abort=True) + click.confirm(f"Are you sure you want to remove multiple templates?", abort=True) for template in templates: if isinstance(template, c.LocalTemplate): cond.purge_template(template) diff --git a/pros/cli/interactive.py b/pros/cli/interactive.py index c36b72a2..465f716f 100644 --- a/pros/cli/interactive.py +++ b/pros/cli/interactive.py @@ -21,7 +21,7 @@ def interactive(): @interactive.command() -@click.option('--directory', default=os.path.join(os.path.expanduser('~'), 'My PROS Project')) +@click.option("--directory", default=os.path.join(os.path.expanduser("~"), "My PROS Project")) @default_options def new_project(directory): from pros.common.ui.interactive.renderers import MachineOutputRenderer diff --git a/pros/cli/main.py b/pros/cli/main.py index 1f539b35..2209ab1a 100644 --- a/pros/cli/main.py +++ b/pros/cli/main.py @@ -26,53 +26,53 @@ # Setup analytics first because it is used by other files -if sys.platform == 'win32': +if sys.platform == "win32": kernel32 = ctypes.windll.kernel32 kernel32.SetConsoleMode(kernel32.GetStdHandle(-11), 7) root_sources = [ - 'build', - 'conductor', - 'conductor_utils', - 'terminal', - 'upload', - 'v5_utils', - 'misc_commands', # misc_commands must be after upload so that "pros u" is an alias for upload, not upgrade - 'interactive', - 'user_script', + "build", + "conductor", + "conductor_utils", + "terminal", + "upload", + "v5_utils", + "misc_commands", # misc_commands must be after upload so that "pros u" is an alias for upload, not upgrade + "interactive", + "user_script", ] -if getattr(sys, 'frozen', False): +if getattr(sys, "frozen", False): exe_file = sys.executable else: exe_file = __file__ -if os.path.exists(os.path.join(os.path.dirname(exe_file), os.pardir, os.pardir, '.git')): - root_sources.append('test') +if os.path.exists(os.path.join(os.path.dirname(exe_file), os.pardir, os.pardir, ".git")): + root_sources.append("test") -if os.path.exists(os.path.join(os.path.dirname(exe_file), os.pardir, os.pardir, '.git')): +if os.path.exists(os.path.join(os.path.dirname(exe_file), os.pardir, os.pardir, ".git")): import pros.cli.test for root_source in root_sources: - __import__(f'pros.cli.{root_source}') + __import__(f"pros.cli.{root_source}") def main(): try: ctx_obj = {} click_handler = pros.common.ui.log.PROSLogHandler(ctx_obj=ctx_obj) - ctx_obj['click_handler'] = click_handler + 
ctx_obj["click_handler"] = click_handler formatter = pros.common.ui.log.PROSLogFormatter( - '%(levelname)s - %(name)s:%(funcName)s - %(message)s - pros-cli version:{version}'.format( + "%(levelname)s - %(name)s:%(funcName)s - %(message)s - pros-cli version:{version}".format( version=get_version() ), ctx_obj, ) click_handler.setFormatter(formatter) logging.basicConfig(level=logging.WARNING, handlers=[click_handler]) - cli.main(prog_name='pros', obj=ctx_obj, windows_expand_args=False) + cli.main(prog_name="pros", obj=ctx_obj, windows_expand_args=False) except KeyboardInterrupt: - click.echo('Aborted!') + click.echo("Aborted!") except Exception as e: logger(__name__).exception(e) @@ -81,10 +81,10 @@ def version(ctx: click.Context, param, value): if not value: return ctx.ensure_object(dict) - if ctx.obj.get('machine_output', False): + if ctx.obj.get("machine_output", False): ui.echo(get_version()) else: - ui.echo('pros, version {}'.format(get_version())) + ui.echo("pros, version {}".format(get_version())) ctx.exit(0) @@ -98,24 +98,24 @@ def use_analytics(ctx: click.Context, param, value): touse = False else: ui.echo( - 'Invalid argument provided for \'--use-analytics\'. Try \'--use-analytics=False\' or \'--use-analytics=True\'' + "Invalid argument provided for '--use-analytics'. Try '--use-analytics=False' or '--use-analytics=True'" ) ctx.exit(0) ctx.ensure_object(dict) analytics.set_use(touse) - ui.echo('Analytics set to : {}'.format(analytics.useAnalytics)) + ui.echo("Analytics set to : {}".format(analytics.useAnalytics)) ctx.exit(0) -@click.command('pros', cls=PROSCommandCollection, sources=root_commands) +@click.command("pros", cls=PROSCommandCollection, sources=root_commands) @click.pass_context @default_options @click.option( - '--version', help='Displays version and exits.', is_flag=True, expose_value=False, is_eager=True, callback=version + "--version", help="Displays version and exits.", is_flag=True, expose_value=False, is_eager=True, callback=version ) @click.option( - '--use-analytics', - help='Set analytics usage (True/False).', + "--use-analytics", + help="Set analytics usage (True/False).", type=str, expose_value=False, is_eager=True, @@ -131,5 +131,5 @@ def after_command(): analytics.process_requests() -if __name__ == '__main__': +if __name__ == "__main__": main() diff --git a/pros/cli/misc_commands.py b/pros/cli/misc_commands.py index d2e62c52..36fa6dd5 100644 --- a/pros/cli/misc_commands.py +++ b/pros/cli/misc_commands.py @@ -10,13 +10,13 @@ def misc_commands_cli(): @misc_commands_cli.command() @click.option( - '--force-check', default=False, is_flag=True, help='Force check for updates, disregarding auto-check frequency' + "--force-check", default=False, is_flag=True, help="Force check for updates, disregarding auto-check frequency" ) @click.option( - '--no-install', + "--no-install", default=False, is_flag=True, - help='Only check if a new version is available, do not attempt to install', + help="Only check if a new version is available, do not attempt to install", ) @default_options def upgrade(force_check, no_install): @@ -26,7 +26,7 @@ def upgrade(force_check, no_install): with ui.Notification(): ui.echo( 'The "pros upgrade" command is currently non-functioning. Did you mean to run "pros c upgrade"?', - color='yellow', + color="yellow", ) return # Dead code below @@ -39,15 +39,15 @@ def upgrade(force_check, no_install): ui.logger(__name__).debug(repr(manifest)) if manager.has_stale_manifest: ui.logger(__name__).error( - 'Failed to get latest upgrade information. 
' + 'Try running with --debug for more information' + "Failed to get latest upgrade information. " + "Try running with --debug for more information" ) return -1 if not manager.needs_upgrade: - ui.finalize('upgradeInfo', 'PROS CLI is up to date') + ui.finalize("upgradeInfo", "PROS CLI is up to date") else: - ui.finalize('upgradeInfo', manifest) + ui.finalize("upgradeInfo", manifest) if not no_install: if not manager.can_perform_upgrade: - ui.logger(__name__).error(f'This manifest cannot perform the upgrade.') + ui.logger(__name__).error(f"This manifest cannot perform the upgrade.") return -3 - ui.finalize('upgradeComplete', manager.perform_upgrade()) + ui.finalize("upgradeComplete", manager.perform_upgrade()) diff --git a/pros/cli/terminal.py b/pros/cli/terminal.py index 5071110b..ac6f25a0 100644 --- a/pros/cli/terminal.py +++ b/pros/cli/terminal.py @@ -22,25 +22,25 @@ def terminal_cli(): @terminal_cli.command() @default_options -@click.argument('port', default='default') +@click.argument("port", default="default") @click.option( - '--backend', - type=click.Choice(['share', 'solo']), - default='solo', - help='Backend port of the terminal. See above for details', + "--backend", + type=click.Choice(["share", "solo"]), + default="solo", + help="Backend port of the terminal. See above for details", ) -@click.option('--raw', is_flag=True, default=False, help='Don\'t process the data.') -@click.option('--hex', is_flag=True, default=False, help="Display data as hexadecimal values. Unaffected by --raw") +@click.option("--raw", is_flag=True, default=False, help="Don't process the data.") +@click.option("--hex", is_flag=True, default=False, help="Display data as hexadecimal values. Unaffected by --raw") @click.option( - '--ports', + "--ports", nargs=2, type=int, default=(None, None), help='Specify 2 ports for the "share" backend. 
The default option deterministically selects ports ' - 'based on the serial port name', + "based on the serial port name", ) -@click.option('--banner/--no-banner', 'request_banner', default=True) -@click.option('--output', nargs=1, type=str, is_eager=True, help='Redirect terminal output to a file', default=None) +@click.option("--banner/--no-banner", "request_banner", default=True) +@click.option("--output", nargs=1, type=str, is_eager=True, help="Redirect terminal output to a file", default=None) def terminal(port: str, backend: str, **kwargs): """ Open a terminal to a serial port @@ -57,31 +57,31 @@ def terminal(port: str, backend: str, **kwargs): from pros.serial.terminal import Terminal is_v5_user_joystick = False - if port == 'default': + if port == "default": project_path = c.Project.find_project(os.getcwd()) if project_path is None: - v5_port, is_v5_user_joystick = resolve_v5_port(None, 'user', quiet=True) + v5_port, is_v5_user_joystick = resolve_v5_port(None, "user", quiet=True) cortex_port = resolve_cortex_port(None, quiet=True) if ((v5_port is None) ^ (cortex_port is None)) or (v5_port is not None and v5_port == cortex_port): port = v5_port or cortex_port else: - raise click.UsageError('You must be in a PROS project directory to enable default port selecting') + raise click.UsageError("You must be in a PROS project directory to enable default port selecting") else: project = c.Project(project_path) port = project.target - if port == 'v5': + if port == "v5": port = None - port, is_v5_user_joystick = resolve_v5_port(port, 'user') - elif port == 'cortex': + port, is_v5_user_joystick = resolve_v5_port(port, "user") + elif port == "cortex": port = None port = resolve_cortex_port(port) - kwargs['raw'] = True + kwargs["raw"] = True if not port: return -1 - if backend == 'share': - raise NotImplementedError('Share backend is not yet implemented') + if backend == "share": + raise NotImplementedError("Share backend is not yet implemented") # ser = SerialSharePort(port) elif is_v5_user_joystick: logger(__name__).debug("it's a v5 joystick") @@ -89,16 +89,16 @@ def terminal(port: str, backend: str, **kwargs): else: logger(__name__).debug("not a v5 joystick") ser = DirectPort(port) - if kwargs.get('raw', False): + if kwargs.get("raw", False): device = devices.RawStreamDevice(ser) else: device = devices.vex.V5UserDevice(ser) - term = Terminal(device, request_banner=kwargs.pop('request_banner', True)) + term = Terminal(device, request_banner=kwargs.pop("request_banner", True)) class TerminalOutput(object): def __init__(self, file): self.terminal = sys.stdout - self.log = open(file, 'a') + self.log = open(file, "a") def write(self, data): self.terminal.write(data) @@ -111,12 +111,12 @@ def end(self): self.log.close() output = None - if kwargs.get('output', None): - output_file = kwargs['output'] - output = TerminalOutput(f'{output_file}') + if kwargs.get("output", None): + output_file = kwargs["output"] + output = TerminalOutput(f"{output_file}") term.console.output = output sys.stdout = output - logger(__name__).info(f'Redirecting Terminal Output to File: {output_file}') + logger(__name__).info(f"Redirecting Terminal Output to File: {output_file}") else: sys.stdout = sys.__stdout__ @@ -130,4 +130,4 @@ def end(self): if output: output.end() term.join() - logger(__name__).info('CLI Main Thread Dying') + logger(__name__).info("CLI Main Thread Dying") diff --git a/pros/cli/upload.py b/pros/cli/upload.py index f69c87bd..1ae3fcc3 100644 --- a/pros/cli/upload.py +++ b/pros/cli/upload.py @@ -13,105 
+13,105 @@ def upload_cli(): pass -@upload_cli.command(aliases=['u']) +@upload_cli.command(aliases=["u"]) @click.option( - '--target', - type=click.Choice(['v5', 'cortex']), + "--target", + type=click.Choice(["v5", "cortex"]), default=None, required=False, - help='Specify the target microcontroller. Overridden when a PROS project is specified.', + help="Specify the target microcontroller. Overridden when a PROS project is specified.", ) -@click.argument('path', type=click.Path(exists=True), default=None, required=False) -@click.argument('port', type=str, default=None, required=False) +@click.argument("path", type=click.Path(exists=True), default=None, required=False) +@click.argument("port", type=str, default=None, required=False) @project_option(required=False, allow_none=True) @click.option( - '--run-after/--no-run-after', - 'run_after', + "--run-after/--no-run-after", + "run_after", default=None, - help='Immediately run the uploaded program.', + help="Immediately run the uploaded program.", cls=PROSDeprecated, - replacement='after', + replacement="after", ) @click.option( - '--run-screen/--execute', - 'run_screen', + "--run-screen/--execute", + "run_screen", default=None, - help='Display run program screen on the brain after upload.', + help="Display run program screen on the brain after upload.", cls=PROSDeprecated, - replacement='after', + replacement="after", ) @click.option( - '-af', - '--after', - type=click.Choice(['run', 'screen', 'none']), + "-af", + "--after", + type=click.Choice(["run", "screen", "none"]), default=None, - help='Action to perform on the brain after upload.', + help="Action to perform on the brain after upload.", cls=PROSOption, - group='V5 Options', + group="V5 Options", ) -@click.option('--quirk', type=int, default=0) +@click.option("--quirk", type=int, default=0) @click.option( - '--name', - 'remote_name', + "--name", + "remote_name", type=str, default=None, required=False, - help='Remote program name.', + help="Remote program name.", cls=PROSOption, - group='V5 Options', + group="V5 Options", ) @click.option( - '--slot', + "--slot", default=None, type=click.IntRange(min=1, max=8), - help='Program slot on the GUI.', + help="Program slot on the GUI.", cls=PROSOption, - group='V5 Options', + group="V5 Options", ) @click.option( - '--icon', - type=click.Choice(['pros', 'pizza', 'planet', 'alien', 'ufo', 'robot', 'clawbot', 'question', 'X', 'power']), - default='pros', + "--icon", + type=click.Choice(["pros", "pizza", "planet", "alien", "ufo", "robot", "clawbot", "question", "X", "power"]), + default="pros", help="Change Program's icon on the V5 Brain", cls=PROSOption, - group='V5 Options', + group="V5 Options", ) @click.option( - '--program-version', + "--program-version", default=None, type=str, - help='Specify version metadata for program.', + help="Specify version metadata for program.", cls=PROSOption, - group='V5 Options', + group="V5 Options", hidden=True, ) @click.option( - '--ini-config', + "--ini-config", type=click.Path(exists=True), default=None, - help='Specify a program configuration file.', + help="Specify a program configuration file.", cls=PROSOption, - group='V5 Options', + group="V5 Options", hidden=True, ) @click.option( - '--compress-bin/--no-compress-bin', - 'compress_bin', + "--compress-bin/--no-compress-bin", + "compress_bin", cls=PROSOption, - group='V5 Options', + group="V5 Options", default=True, - help='Compress the program binary before uploading.', + help="Compress the program binary before uploading.", ) @click.option( - 
'--description', + "--description", default="Made with PROS", type=str, cls=PROSOption, - group='V5 Options', - help='Change the description displayed for the program.', + group="V5 Options", + help="Change the description displayed for the program.", ) @click.option( - '--name', default=None, type=str, cls=PROSOption, group='V5 Options', help='Change the name of the program.' + "--name", default=None, type=str, cls=PROSOption, group="V5 Options", help="Change the name of the program." ) @default_options def upload(path: Optional[str], project: Optional[c.Project], port: str, **kwargs): @@ -128,110 +128,110 @@ def upload(path: Optional[str], project: Optional[c.Project], port: str, **kwarg import pros.serial.devices.vex as vex from pros.serial.ports import DirectPort - kwargs['ide_version'] = project.kernel if not project == None else "None" - kwargs['ide'] = 'PROS' + kwargs["ide_version"] = project.kernel if not project == None else "None" + kwargs["ide"] = "PROS" if path is None or os.path.isdir(path): if project is None: project_path = c.Project.find_project(path or os.getcwd()) if project_path is None: - raise click.UsageError('Specify a file to upload or set the cwd inside a PROS project') + raise click.UsageError("Specify a file to upload or set the cwd inside a PROS project") project = c.Project(project_path) path = os.path.join(project.location, project.output) - if project.target == 'v5' and not kwargs['remote_name']: - kwargs['remote_name'] = project.name + if project.target == "v5" and not kwargs["remote_name"]: + kwargs["remote_name"] = project.name # apply upload_options as a template options = dict(**project.upload_options) - if 'port' in options and port is None: - port = options.get('port', None) - if 'slot' in options and kwargs.get('slot', None) is None: - kwargs.pop('slot') - elif kwargs.get('slot', None) is None: - kwargs['slot'] = 1 - if 'icon' in options and kwargs.get('icon', 'pros') == 'pros': - kwargs.pop('icon') - if 'after' in options and kwargs.get('after', 'screen') is None: - kwargs.pop('after') + if "port" in options and port is None: + port = options.get("port", None) + if "slot" in options and kwargs.get("slot", None) is None: + kwargs.pop("slot") + elif kwargs.get("slot", None) is None: + kwargs["slot"] = 1 + if "icon" in options and kwargs.get("icon", "pros") == "pros": + kwargs.pop("icon") + if "after" in options and kwargs.get("after", "screen") is None: + kwargs.pop("after") options.update(kwargs) kwargs = options - kwargs['target'] = project.target # enforce target because uploading to the wrong uC is VERY bad - if 'program-version' in kwargs: - kwargs['version'] = kwargs['program-version'] - if 'remote_name' not in kwargs: - kwargs['remote_name'] = project.name + kwargs["target"] = project.target # enforce target because uploading to the wrong uC is VERY bad + if "program-version" in kwargs: + kwargs["version"] = kwargs["program-version"] + if "remote_name" not in kwargs: + kwargs["remote_name"] = project.name name_to_file = { - 'pros': 'USER902x.bmp', - 'pizza': 'USER003x.bmp', - 'planet': 'USER013x.bmp', - 'alien': 'USER027x.bmp', - 'ufo': 'USER029x.bmp', - 'clawbot': 'USER010x.bmp', - 'robot': 'USER011x.bmp', - 'question': 'USER002x.bmp', - 'power': 'USER012x.bmp', - 'X': 'USER001x.bmp', + "pros": "USER902x.bmp", + "pizza": "USER003x.bmp", + "planet": "USER013x.bmp", + "alien": "USER027x.bmp", + "ufo": "USER029x.bmp", + "clawbot": "USER010x.bmp", + "robot": "USER011x.bmp", + "question": "USER002x.bmp", + "power": "USER012x.bmp", + "X": 
"USER001x.bmp", } - kwargs['icon'] = name_to_file[kwargs['icon']] - if 'target' not in kwargs or kwargs['target'] is None: - logger(__name__).debug(f'Target not specified. Arguments provided: {kwargs}') - raise click.UsageError('Target not specified. specify a project (using the file argument) or target manually') - if kwargs['target'] == 'v5': - port = resolve_v5_port(port, 'system')[0] - elif kwargs['target'] == 'cortex': + kwargs["icon"] = name_to_file[kwargs["icon"]] + if "target" not in kwargs or kwargs["target"] is None: + logger(__name__).debug(f"Target not specified. Arguments provided: {kwargs}") + raise click.UsageError("Target not specified. specify a project (using the file argument) or target manually") + if kwargs["target"] == "v5": + port = resolve_v5_port(port, "system")[0] + elif kwargs["target"] == "cortex": port = resolve_cortex_port(port) else: logger(__name__).debug(f"Invalid target provided: {kwargs['target']}") logger(__name__).debug('Target should be one of ("v5" or "cortex").') if not port: - raise dont_send(click.UsageError('No port provided or located. Make sure to specify --target if needed.')) - if kwargs['target'] == 'v5': - kwargs['remote_name'] = kwargs['name'] if kwargs.get("name", None) else kwargs['remote_name'] - if kwargs['remote_name'] is None: - kwargs['remote_name'] = os.path.splitext(os.path.basename(path))[0] - kwargs['remote_name'] = kwargs['remote_name'].replace('@', '_') - kwargs['slot'] -= 1 + raise dont_send(click.UsageError("No port provided or located. Make sure to specify --target if needed.")) + if kwargs["target"] == "v5": + kwargs["remote_name"] = kwargs["name"] if kwargs.get("name", None) else kwargs["remote_name"] + if kwargs["remote_name"] is None: + kwargs["remote_name"] = os.path.splitext(os.path.basename(path))[0] + kwargs["remote_name"] = kwargs["remote_name"].replace("@", "_") + kwargs["slot"] -= 1 action_to_kwarg = { - 'run': vex.V5Device.FTCompleteOptions.RUN_IMMEDIATELY, - 'screen': vex.V5Device.FTCompleteOptions.RUN_SCREEN, - 'none': vex.V5Device.FTCompleteOptions.DONT_RUN, + "run": vex.V5Device.FTCompleteOptions.RUN_IMMEDIATELY, + "screen": vex.V5Device.FTCompleteOptions.RUN_SCREEN, + "none": vex.V5Device.FTCompleteOptions.DONT_RUN, } - after_upload_default = 'screen' + after_upload_default = "screen" # Determine which FTCompleteOption to assign to run_after - if kwargs['after'] == None: - kwargs['after'] = after_upload_default - if kwargs['run_after']: - kwargs['after'] = 'run' - elif kwargs['run_screen'] == False and not kwargs['run_after']: - kwargs['after'] = 'none' - kwargs['run_after'] = action_to_kwarg[kwargs['after']] - kwargs.pop('run_screen') - kwargs.pop('after') - elif kwargs['target'] == 'cortex': + if kwargs["after"] == None: + kwargs["after"] = after_upload_default + if kwargs["run_after"]: + kwargs["after"] = "run" + elif kwargs["run_screen"] == False and not kwargs["run_after"]: + kwargs["after"] = "none" + kwargs["run_after"] = action_to_kwarg[kwargs["after"]] + kwargs.pop("run_screen") + kwargs.pop("after") + elif kwargs["target"] == "cortex": pass - logger(__name__).debug('Arguments: {}'.format(str(kwargs))) + logger(__name__).debug("Arguments: {}".format(str(kwargs))) # Do the actual uploading! 
try: ser = DirectPort(port) device = None - if kwargs['target'] == 'v5': + if kwargs["target"] == "v5": device = vex.V5Device(ser) - elif kwargs['target'] == 'cortex': + elif kwargs["target"] == "cortex": device = vex.CortexDevice(ser).get_connected_device() if project is not None: device.upload_project(project, **kwargs) else: - with click.open_file(path, mode='rb') as pf: + with click.open_file(path, mode="rb") as pf: device.write_program(pf, **kwargs) except Exception as e: logger(__name__).exception(e, exc_info=True) exit(1) -@upload_cli.command('lsusb', aliases=['ls-usb', 'ls-devices', 'lsdev', 'list-usb', 'list-devices']) -@click.option('--target', type=click.Choice(['v5', 'cortex']), default=None, required=False) +@upload_cli.command("lsusb", aliases=["ls-usb", "ls-devices", "lsdev", "list-usb", "list-devices"]) +@click.option("--target", type=click.Choice(["v5", "cortex"]), default=None, required=False) @default_options def ls_usb(target): """ @@ -243,34 +243,34 @@ def ls_usb(target): class PortReport(object): def __init__(self, header: str, ports: List[Any], machine_header: Optional[str] = None): self.header = header - self.ports = [{'device': p.device, 'desc': p.description} for p in ports] + self.ports = [{"device": p.device, "desc": p.description} for p in ports] self.machine_header = machine_header or header def __getstate__(self): - return {'device_type': self.machine_header, 'devices': self.ports} + return {"device_type": self.machine_header, "devices": self.ports} def __str__(self): if len(self.ports) == 0: - return f'There are no connected {self.header}' + return f"There are no connected {self.header}" else: port_str = "\n".join([f"{p['device']} - {p['desc']}" for p in self.ports]) - return f'{self.header}:\n{port_str}' + return f"{self.header}:\n{port_str}" result = [] - if target == 'v5' or target is None: - ports = find_v5_ports('system') - result.append(PortReport('VEX EDR V5 System Ports', ports, 'v5/system')) + if target == "v5" or target is None: + ports = find_v5_ports("system") + result.append(PortReport("VEX EDR V5 System Ports", ports, "v5/system")) - ports = find_v5_ports('User') - result.append(PortReport('VEX EDR V5 User ports', ports, 'v5/user')) - if target == 'cortex' or target is None: + ports = find_v5_ports("User") + result.append(PortReport("VEX EDR V5 User ports", ports, "v5/user")) + if target == "cortex" or target is None: ports = find_cortex_ports() - result.append(PortReport('VEX EDR Cortex Microcontroller Ports', ports, 'cortex')) + result.append(PortReport("VEX EDR Cortex Microcontroller Ports", ports, "cortex")) - ui.finalize('lsusb', result) + ui.finalize("lsusb", result) -@upload_cli.command('upload-terminal', aliases=['ut'], hidden=True) +@upload_cli.command("upload-terminal", aliases=["ut"], hidden=True) @shadow_command(upload) @click.pass_context def make_upload_terminal(ctx, **upload_kwargs): diff --git a/pros/cli/user_script.py b/pros/cli/user_script.py index a9057496..e2e26d30 100644 --- a/pros/cli/user_script.py +++ b/pros/cli/user_script.py @@ -11,8 +11,8 @@ def user_script_cli(): pass -@user_script_cli.command(short_help='Run user script files', hidden=True) -@click.argument('script_file') +@user_script_cli.command(short_help="Run user script files", hidden=True) +@click.argument("script_file") @default_options def user_script(script_file): """ @@ -24,6 +24,6 @@ def user_script(script_file): package_name = os.path.splitext(os.path.split(script_file)[0])[0] package_path = os.path.abspath(script_file) - ui.echo(f'Loading {package_name} 
from {package_path}') + ui.echo(f"Loading {package_name} from {package_path}") spec = importlib.util.spec_from_file_location(package_name, package_path) spec.loader.load_module() diff --git a/pros/cli/v5_utils.py b/pros/cli/v5_utils.py index f69ecfa6..efb51208 100644 --- a/pros/cli/v5_utils.py +++ b/pros/cli/v5_utils.py @@ -8,14 +8,14 @@ def v5_utils_cli(): pass -@v5_utils_cli.group(cls=PROSGroup, help='Utilities for managing the VEX V5') +@v5_utils_cli.group(cls=PROSGroup, help="Utilities for managing the VEX V5") @default_options def v5(): pass @v5.command() -@click.argument('port', required=False, default=None) +@click.argument("port", required=False, default=None) @default_options def status(port: str): """ @@ -25,7 +25,7 @@ def status(port: str): from pros.serial.devices.vex import V5Device from pros.serial.ports import DirectPort - port = resolve_v5_port(port, 'system')[0] + port = resolve_v5_port(port, "system")[0] if not port: return -1 @@ -34,17 +34,17 @@ def status(port: str): if ismachineoutput(): print(device.status) else: - print('Connected to V5 on {}'.format(port)) - print('System version:', device.status['system_version']) - print('CPU0 F/W version:', device.status['cpu0_version']) - print('CPU1 SDK version:', device.status['cpu1_version']) - print('System ID: 0x{:x}'.format(device.status['system_id'])) + print("Connected to V5 on {}".format(port)) + print("System version:", device.status["system_version"]) + print("CPU0 F/W version:", device.status["cpu0_version"]) + print("CPU1 SDK version:", device.status["cpu1_version"]) + print("System ID: 0x{:x}".format(device.status["system_id"])) -@v5.command('ls-files') -@click.option('--vid', type=int, default=1, cls=PROSOption, hidden=True) -@click.option('--options', type=int, default=0, cls=PROSOption, hidden=True) -@click.argument('port', required=False, default=None) +@v5.command("ls-files") +@click.option("--vid", type=int, default=1, cls=PROSOption, hidden=True) +@click.option("--options", type=int, default=0, cls=PROSOption, hidden=True) +@click.argument("port", required=False, default=None) @default_options def ls_files(port: str, vid: int, options: int): """ @@ -54,7 +54,7 @@ def ls_files(port: str, vid: int, options: int): from pros.serial.devices.vex import V5Device from pros.serial.ports import DirectPort - port = resolve_v5_port(port, 'system')[0] + port = resolve_v5_port(port, "system")[0] if not port: return -1 @@ -66,11 +66,11 @@ def ls_files(port: str, vid: int, options: int): @v5.command(hidden=True) -@click.argument('file_name') -@click.argument('port', required=False, default=None) -@click.argument('outfile', required=False, default=click.get_binary_stream('stdout'), type=click.File('wb')) -@click.option('--vid', type=int, default=1, cls=PROSOption, hidden=True) -@click.option('--source', type=click.Choice(['ddr', 'flash']), default='flash', cls=PROSOption, hidden=True) +@click.argument("file_name") +@click.argument("port", required=False, default=None) +@click.argument("outfile", required=False, default=click.get_binary_stream("stdout"), type=click.File("wb")) +@click.option("--vid", type=int, default=1, cls=PROSOption, hidden=True) +@click.option("--source", type=click.Choice(["ddr", "flash"]), default="flash", cls=PROSOption, hidden=True) @default_options def read_file(file_name: str, port: str, vid: int, source: str): """ @@ -80,23 +80,23 @@ def read_file(file_name: str, port: str, vid: int, source: str): from pros.serial.devices.vex import V5Device from pros.serial.ports import DirectPort - port = 
resolve_v5_port(port, 'system')[0] + port = resolve_v5_port(port, "system")[0] if not port: return -1 ser = DirectPort(port) device = V5Device(ser) - device.read_file(file=click.get_binary_stream('stdout'), remote_file=file_name, vid=vid, target=source) + device.read_file(file=click.get_binary_stream("stdout"), remote_file=file_name, vid=vid, target=source) @v5.command(hidden=True) -@click.argument('file', type=click.File('rb')) -@click.argument('port', required=False, default=None) -@click.option('--addr', type=int, default=0x03800000, required=False) -@click.option('--remote-file', required=False, default=None) -@click.option('--run-after/--no-run-after', 'run_after', default=False) -@click.option('--vid', type=int, default=1, cls=PROSOption, hidden=True) -@click.option('--target', type=click.Choice(['ddr', 'flash']), default='flash') +@click.argument("file", type=click.File("rb")) +@click.argument("port", required=False, default=None) +@click.option("--addr", type=int, default=0x03800000, required=False) +@click.option("--remote-file", required=False, default=None) +@click.option("--run-after/--no-run-after", "run_after", default=False) +@click.option("--vid", type=int, default=1, cls=PROSOption, hidden=True) +@click.option("--target", type=click.Choice(["ddr", "flash"]), default="flash") @default_options def write_file(file, port: str, remote_file: str, **kwargs): """ @@ -106,7 +106,7 @@ def write_file(file, port: str, remote_file: str, **kwargs): from pros.serial.devices.vex import V5Device from pros.serial.ports import DirectPort - port = resolve_v5_port(port, 'system')[0] + port = resolve_v5_port(port, "system")[0] if not port: return -1 @@ -115,16 +115,16 @@ def write_file(file, port: str, remote_file: str, **kwargs): device.write_file(file=file, remote_file=remote_file or os.path.basename(file.name), **kwargs) -@v5.command('rm-file') -@click.argument('file_name') -@click.argument('port', required=False, default=None) -@click.option('--vid', type=int, default=1, cls=PROSOption, hidden=True) +@v5.command("rm-file") +@click.argument("file_name") +@click.argument("port", required=False, default=None) +@click.option("--vid", type=int, default=1, cls=PROSOption, hidden=True) @click.option( - '--erase-all/--erase-only', - 'erase_all', + "--erase-all/--erase-only", + "erase_all", default=False, show_default=True, - help='Erase all files matching base name.', + help="Erase all files matching base name.", ) @default_options def rm_file(file_name: str, port: str, vid: int, erase_all: bool): @@ -135,7 +135,7 @@ def rm_file(file_name: str, port: str, vid: int, erase_all: bool): from pros.serial.devices.vex import V5Device from pros.serial.ports import DirectPort - port = resolve_v5_port(port, 'system')[0] + port = resolve_v5_port(port, "system")[0] if not port: return -1 @@ -144,10 +144,10 @@ def rm_file(file_name: str, port: str, vid: int, erase_all: bool): device.erase_file(file_name, vid=vid, erase_all=erase_all) -@v5.command('cat-metadata') -@click.argument('file_name') -@click.argument('port', required=False, default=None) -@click.option('--vid', type=int, default=1, cls=PROSOption, hidden=True) +@v5.command("cat-metadata") +@click.argument("file_name") +@click.argument("port", required=False, default=None) +@click.option("--vid", type=int, default=1, cls=PROSOption, hidden=True) @default_options def cat_metadata(file_name: str, port: str, vid: int): """ @@ -157,7 +157,7 @@ def cat_metadata(file_name: str, port: str, vid: int): from pros.serial.devices.vex import V5Device from 
pros.serial.ports import DirectPort - port = resolve_v5_port(port, 'system')[0] + port = resolve_v5_port(port, "system")[0] if not port: return -1 @@ -166,10 +166,10 @@ def cat_metadata(file_name: str, port: str, vid: int): print(device.get_file_metadata_by_name(file_name, vid=vid)) -@v5.command('rm-program') -@click.argument('slot') -@click.argument('port', type=int, required=False, default=None) -@click.option('--vid', type=int, default=1, cls=PROSOption, hidden=True) +@v5.command("rm-program") +@click.argument("slot") +@click.argument("port", type=int, required=False, default=None) +@click.option("--vid", type=int, default=1, cls=PROSOption, hidden=True) @default_options def rm_program(slot: int, port: str, vid: int): """ @@ -178,20 +178,20 @@ def rm_program(slot: int, port: str, vid: int): from pros.serial.devices.vex import V5Device from pros.serial.ports import DirectPort - port = resolve_v5_port(port, 'system')[0] + port = resolve_v5_port(port, "system")[0] if not port: return -1 - base_name = f'slot_{slot}' + base_name = f"slot_{slot}" ser = DirectPort(port) device = V5Device(ser) - device.erase_file(f'{base_name}.ini', vid=vid) - device.erase_file(f'{base_name}.bin', vid=vid) + device.erase_file(f"{base_name}.ini", vid=vid) + device.erase_file(f"{base_name}.bin", vid=vid) -@v5.command('rm-all') -@click.argument('port', required=False, default=None) -@click.option('--vid', type=int, default=1, hidden=True, cls=PROSOption) +@v5.command("rm-all") +@click.argument("port", required=False, default=None) +@click.option("--vid", type=int, default=1, hidden=True, cls=PROSOption) @default_options def rm_all(port: str, vid: int): """ @@ -201,7 +201,7 @@ def rm_all(port: str, vid: int): from pros.serial.devices.vex import V5Device from pros.serial.ports import DirectPort - port = resolve_v5_port(port, 'system')[0] + port = resolve_v5_port(port, "system")[0] if not port: return -1 @@ -210,14 +210,14 @@ def rm_all(port: str, vid: int): c = device.get_dir_count(vid=vid) files = [] for i in range(0, c): - files.append(device.get_file_metadata_by_idx(i)['filename']) + files.append(device.get_file_metadata_by_idx(i)["filename"]) for file in files: device.erase_file(file, vid=vid) -@v5.command(short_help='Run a V5 Program') -@click.argument('slot', required=False, default=1, type=click.IntRange(1, 8)) -@click.argument('port', required=False, default=None) +@v5.command(short_help="Run a V5 Program") +@click.argument("slot", required=False, default=1, type=click.IntRange(1, 8)) +@click.argument("port", required=False, default=None) @default_options def run(slot: str, port: str): """ @@ -227,13 +227,13 @@ def run(slot: str, port: str): from pros.serial.devices.vex import V5Device from pros.serial.ports import DirectPort - file = f'slot_{slot}.bin' + file = f"slot_{slot}.bin" import re - if not re.match(r'[\w\.]{1,24}', file): - logger(__name__).error('file must be a valid V5 filename') + if not re.match(r"[\w\.]{1,24}", file): + logger(__name__).error("file must be a valid V5 filename") return 1 - port = resolve_v5_port(port, 'system')[0] + port = resolve_v5_port(port, "system")[0] if not port: return -1 ser = DirectPort(port) @@ -241,8 +241,8 @@ def run(slot: str, port: str): device.execute_program_file(file, run=True) -@v5.command(short_help='Stop a V5 Program') -@click.argument('port', required=False, default=None) +@v5.command(short_help="Stop a V5 Program") +@click.argument("port", required=False, default=None) @default_options def stop(port: str): """ @@ -253,18 +253,18 @@ def stop(port: str): 
from pros.serial.devices.vex import V5Device from pros.serial.ports import DirectPort - port = resolve_v5_port(port, 'system')[0] + port = resolve_v5_port(port, "system")[0] if not port: return -1 ser = DirectPort(port) device = V5Device(ser) - device.execute_program_file('', run=False) + device.execute_program_file("", run=False) -@v5.command(short_help='Take a screen capture of the display') -@click.argument('file_name', required=False, default=None) -@click.argument('port', required=False, default=None) -@click.option('--force', is_flag=True, type=bool, default=False) +@v5.command(short_help="Take a screen capture of the display") +@click.argument("file_name", required=False, default=None) +@click.argument("port", required=False, default=None) +@click.option("--force", is_flag=True, type=bool, default=False) @default_options def capture(file_name: str, port: str, force: bool = False): """ @@ -277,7 +277,7 @@ def capture(file_name: str, port: str, force: bool = False): from pros.serial.devices.vex import V5Device from pros.serial.ports import DirectPort - port = resolve_v5_port(port, 'system')[0] + port = resolve_v5_port(port, "system")[0] if not port: return -1 ser = DirectPort(port) @@ -285,65 +285,65 @@ def capture(file_name: str, port: str, force: bool = False): i_data, width, height = device.capture_screen() if i_data is None: - print('Failed to capture screen from connected brain.') + print("Failed to capture screen from connected brain.") return -1 # Sanity checking and default values for filenames if file_name is None: import time - time_s = time.strftime('%Y-%m-%d-%H%M%S') - file_name = f'{time_s}_{width}x{height}_pros_capture.png' - if file_name == '-': + time_s = time.strftime("%Y-%m-%d-%H%M%S") + file_name = f"{time_s}_{width}x{height}_pros_capture.png" + if file_name == "-": # Send the data to stdout to allow for piping - print(i_data, end='') + print(i_data, end="") return - if not file_name.endswith('.png'): - file_name += '.png' + if not file_name.endswith(".png"): + file_name += ".png" if not force and os.path.exists(file_name): - print(f'{file_name} already exists. Refusing to overwrite!') - print('Re-run this command with the --force argument to overwrite existing files.') + print(f"{file_name} already exists. 
Refusing to overwrite!") + print("Re-run this command with the --force argument to overwrite existing files.") return -1 - with open(file_name, 'wb') as file_: + with open(file_name, "wb") as file_: w = png.Writer(width, height, greyscale=False) w.write(file_, i_data) - print(f'Saved screen capture to {file_name}') + print(f"Saved screen capture to {file_name}") -@v5.command(aliases=['sv', 'set'], short_help='Set a kernel variable on a connected V5 device') -@click.argument('variable', type=click.Choice(['teamnumber', 'robotname']), required=True) -@click.argument('value', required=True, type=click.STRING, nargs=1) -@click.argument('port', type=str, default=None, required=False) +@v5.command(aliases=["sv", "set"], short_help="Set a kernel variable on a connected V5 device") +@click.argument("variable", type=click.Choice(["teamnumber", "robotname"]), required=True) +@click.argument("value", required=True, type=click.STRING, nargs=1) +@click.argument("port", type=str, default=None, required=False) @default_options def set_variable(variable, value, port): import pros.serial.devices.vex as vex from pros.serial.ports import DirectPort # Get the connected v5 device - port = resolve_v5_port(port, 'system')[0] + port = resolve_v5_port(port, "system")[0] if port == None: return device = vex.V5Device(DirectPort(port)) actual_value = device.kv_write(variable, value).decode() - print(f'Value of \'{variable}\' set to : {actual_value}') + print(f"Value of '{variable}' set to : {actual_value}") -@v5.command(aliases=['rv', 'get'], short_help='Read a kernel variable from a connected V5 device') -@click.argument('variable', type=click.Choice(['teamnumber', 'robotname']), required=True) -@click.argument('port', type=str, default=None, required=False) +@v5.command(aliases=["rv", "get"], short_help="Read a kernel variable from a connected V5 device") +@click.argument("variable", type=click.Choice(["teamnumber", "robotname"]), required=True) +@click.argument("port", type=str, default=None, required=False) @default_options def read_variable(variable, port): import pros.serial.devices.vex as vex from pros.serial.ports import DirectPort # Get the connected v5 device - port = resolve_v5_port(port, 'system')[0] + port = resolve_v5_port(port, "system")[0] if port == None: return device = vex.V5Device(DirectPort(port)) value = device.kv_read(variable).decode() - print(f'Value of \'{variable}\' is : {value}') + print(f"Value of '{variable}' is : {value}") diff --git a/pros/common/sentry.py b/pros/common/sentry.py index 38207169..57032478 100644 --- a/pros/common/sentry.py +++ b/pros/common/sentry.py @@ -10,7 +10,7 @@ from pros.config.cli_config import CliConfig # noqa: F401, flake8 issue, flake8 issue with "if TYPE_CHECKING" -cli_config: 'CliConfig' = None +cli_config: "CliConfig" = None force_prompt_off = False SUPPRESSED_EXCEPTIONS = [PermissionError, click.Abort] @@ -29,43 +29,43 @@ def prompt_to_send(event: Dict[str, Any], hint: Optional[Dict[str, Any]]) -> Opt if cli_config is None or (cli_config.offer_sentry is not None and not cli_config.offer_sentry): return if force_prompt_off: - ui.logger(__name__).debug('Sentry prompt was forced off through click option') + ui.logger(__name__).debug("Sentry prompt was forced off through click option") return - if 'extra' in event and not event['extra'].get('sentry', True): - ui.logger(__name__).debug('Not sending candidate event because event was tagged with extra.sentry = False') + if "extra" in event and not event["extra"].get("sentry", True): + 
ui.logger(__name__).debug("Not sending candidate event because event was tagged with extra.sentry = False") return - if 'exc_info' in hint and ( - not getattr(hint['exc_info'][1], 'sentry', True) - or any(isinstance(hint['exc_info'][1], t) for t in SUPPRESSED_EXCEPTIONS) + if "exc_info" in hint and ( + not getattr(hint["exc_info"][1], "sentry", True) + or any(isinstance(hint["exc_info"][1], t) for t in SUPPRESSED_EXCEPTIONS) ): - ui.logger(__name__).debug('Not sending candidate event because exception was tagged with sentry = False') + ui.logger(__name__).debug("Not sending candidate event because exception was tagged with sentry = False") return - if not event['tags']: - event['tags'] = dict() - - extra_text = '' - if 'message' in event: - extra_text += event['message'] + '\n' - if 'culprit' in event: - extra_text += event['culprit'] + '\n' - if 'logentry' in event and 'message' in event['logentry']: - extra_text += event['logentry']['message'] + '\n' - if 'exc_info' in hint: + if not event["tags"]: + event["tags"] = dict() + + extra_text = "" + if "message" in event: + extra_text += event["message"] + "\n" + if "culprit" in event: + extra_text += event["culprit"] + "\n" + if "logentry" in event and "message" in event["logentry"]: + extra_text += event["logentry"]["message"] + "\n" + if "exc_info" in hint: import traceback - extra_text += ''.join(traceback.format_exception(*hint['exc_info'], limit=4)) + extra_text += "".join(traceback.format_exception(*hint["exc_info"], limit=4)) - event['tags']['confirmed'] = ui.confirm( - 'We detected something went wrong! Do you want to send a report?', log=extra_text + event["tags"]["confirmed"] = ui.confirm( + "We detected something went wrong! Do you want to send a report?", log=extra_text ) - if event['tags']['confirmed']: - ui.echo('Sending bug report.') + if event["tags"]["confirmed"]: + ui.echo("Sending bug report.") ui.echo(f'Want to get updates? 
Visit https://pros.cs.purdue.edu/report.html?event={event["event_id"]}') return event else: - ui.echo('Not sending bug report.') + ui.echo("Not sending bug report.") def add_context(obj: object, override_handlers: bool = True, key: str = None) -> None: @@ -90,14 +90,14 @@ class TemplateHandler(jsonpickle.handlers.BaseHandler): def flatten(self, obj: BaseTemplate, data): rv = { - 'name': obj.name, - 'version': obj.version, - 'target': obj.target, + "name": obj.name, + "version": obj.version, + "target": obj.target, } - if hasattr(obj, 'location'): - rv['location'] = obj.location - if hasattr(obj, 'origin'): - rv['origin'] = obj.origin + if hasattr(obj, "location"): + rv["location"] = obj.location + if hasattr(obj, "origin"): + rv["origin"] = obj.origin return rv def restore(self, obj): @@ -122,7 +122,7 @@ def add_tag(key: str, value: str): scope.set_tag(key, value) -def register(cfg: Optional['CliConfig'] = None): +def register(cfg: Optional["CliConfig"] = None): global cli_config, client if cfg is None: from pros.config.cli_config import cli_config as get_cli_config @@ -141,14 +141,14 @@ def register(cfg: Optional['CliConfig'] = None): from pros.upgrade import get_platformv2 client = sentry.Client( - 'https://00bd27dcded6436cad5c8b2941d6a9d6@sentry.io/1226033', + "https://00bd27dcded6436cad5c8b2941d6a9d6@sentry.io/1226033", before_send=prompt_to_send, release=ui.get_version(), ) sentry.Hub.current.bind_client(client) with sentry.configure_scope() as scope: - scope.set_tag('platformv2', get_platformv2().name) + scope.set_tag("platformv2", get_platformv2().name) -__all__ = ['add_context', 'register', 'add_tag'] +__all__ = ["add_context", "register", "add_tag"] diff --git a/pros/common/ui/__init__.py b/pros/common/ui/__init__.py index d79b1e0f..4b4963e4 100644 --- a/pros/common/ui/__init__.py +++ b/pros/common/ui/__init__.py @@ -12,15 +12,15 @@ def _machineoutput(obj: Dict[str, Any]): - click.echo(f'Uc&42BWAaQ{jsonpickle.dumps(obj, unpicklable=False, backend=_machine_pickler)}') + click.echo(f"Uc&42BWAaQ{jsonpickle.dumps(obj, unpicklable=False, backend=_machine_pickler)}") def _machine_notify(method: str, obj: Dict[str, Any], notify_value: Optional[int]): if notify_value is None: global _current_notify_value notify_value = _current_notify_value - obj['type'] = f'notify/{method}' - obj['notify_value'] = notify_value + obj["type"] = f"notify/{method}" + obj["notify_value"] = notify_value _machineoutput(obj) @@ -33,10 +33,10 @@ def echo( output_machine: bool = True, ctx: Optional[click.Context] = None, ): - add_breadcrumb(message=text, category='echo') + add_breadcrumb(message=text, category="echo") if ismachineoutput(ctx): if output_machine: - return _machine_notify('echo', {'text': str(text) + ('\n' if nl else '')}, notify_value) + return _machine_notify("echo", {"text": str(text) + ("\n" if nl else "")}, notify_value) else: return click.echo(str(text), nl=nl, err=err, color=color) @@ -45,13 +45,13 @@ def confirm( text: str, default: bool = False, abort: bool = False, - prompt_suffix: bool = ': ', + prompt_suffix: bool = ": ", show_default: bool = True, err: bool = False, - title: AnyStr = 'Please confirm:', + title: AnyStr = "Please confirm:", log: str = None, ): - add_breadcrumb(message=text, category='confirm') + add_breadcrumb(message=text, category="confirm") if ismachineoutput(): from pros.common.ui.interactive.ConfirmModal import ConfirmModal from pros.common.ui.interactive.renderers import MachineOutputRenderer @@ -62,7 +62,7 @@ def confirm( rv = click.confirm( text, default=default, 
abort=abort, prompt_suffix=prompt_suffix, show_default=show_default, err=err ) - add_breadcrumb(message=f'User responded: {rv}') + add_breadcrumb(message=f"User responded: {rv}") return rv @@ -73,7 +73,7 @@ def prompt( confirmation_prompt=False, type=None, value_proc=None, - prompt_suffix=': ', + prompt_suffix=": ", show_default=True, err=False, ): @@ -102,10 +102,10 @@ def progressbar( show_percent: bool = True, show_pos: bool = False, item_show_func: Callable = None, - fill_char: str = '#', - empty_char: str = '-', - bar_template: str = '%(label)s [%(bar)s] %(info)s', - info_sep: str = ' ', + fill_char: str = "#", + empty_char: str = "-", + bar_template: str = "%(label)s [%(bar)s] %(info)s", + info_sep: str = " ", width: int = 36, ): if ismachineoutput(): @@ -129,12 +129,12 @@ def finalize( human_readable = data elif isinstance(data, List): if len(data) == 0: - human_readable = '' + human_readable = "" elif isinstance(data[0], str): - human_readable = '\n'.join(data) + human_readable = "\n".join(data) elif isinstance(data[0], dict) or isinstance(data[0], object): - if hasattr(data[0], '__str__'): - human_readable = '\n'.join([str(d) for d in data]) + if hasattr(data[0], "__str__"): + human_readable = "\n".join([str(d) for d in data]) else: if not isinstance(data[0], dict): data = [d.__dict__ for d in data] @@ -147,13 +147,13 @@ def finalize( human_readable = tabulate.tabulate(data[1:], headers=data[0]) else: human_readable = data - elif hasattr(data, '__str__'): + elif hasattr(data, "__str__"): human_readable = str(data) else: human_readable = data.__dict__ - human_readable = (human_prefix or '') + str(human_readable) + human_readable = (human_prefix or "") + str(human_readable) if ismachineoutput(): - _machineoutput({'type': 'finalize', 'method': method, 'data': data, 'human': human_readable}) + _machineoutput({"type": "finalize", "method": method, "data": data, "human": human_readable}) else: echo(human_readable) @@ -161,8 +161,8 @@ def finalize( class _MachineOutputProgressBar(_click_ProgressBar): def __init__(self, *args, **kwargs): global _current_notify_value - kwargs['file'] = open(os.devnull, 'w', encoding='UTF-8') - self.notify_value = kwargs.pop('notify_value', _current_notify_value) + kwargs["file"] = open(os.devnull, "w", encoding="UTF-8") + self.notify_value = kwargs.pop("notify_value", _current_notify_value) super(_MachineOutputProgressBar, self).__init__(*args, **kwargs) def __del__(self): @@ -170,10 +170,10 @@ def __del__(self): def render_progress(self): super(_MachineOutputProgressBar, self).render_progress() - obj = {'text': self.label, 'pct': self.pct} + obj = {"text": self.label, "pct": self.pct} if self.show_eta and self.eta_known and not self.finished: - obj['eta'] = self.eta - _machine_notify('progress', obj, self.notify_value) + obj["eta"] = self.eta + _machine_notify("progress", obj, self.notify_value) class Notification(object): @@ -206,7 +206,7 @@ def __init__(self, err: bool = False, ctx: Optional[click.Context] = None): threading.Thread.__init__(self) self.daemon = False self.fdRead, self.fdWrite = os.pipe() - self.pipeReader = os.fdopen(self.fdRead, encoding='UTF-8') + self.pipeReader = os.fdopen(self.fdRead, encoding="UTF-8") self.start() def fileno(self): @@ -215,8 +215,8 @@ def fileno(self): def run(self): """Run the thread, logging everything.""" - for line in iter(self.pipeReader.readline, ''): - echo(line.strip('\n'), ctx=self.click_ctx, err=self.is_err) + for line in iter(self.pipeReader.readline, ""): + echo(line.strip("\n"), ctx=self.click_ctx, 
err=self.is_err) self.pipeReader.close() @@ -225,4 +225,4 @@ def close(self): os.close(self.fdWrite) -__all__ = ['finalize', 'echo', 'confirm', 'prompt', 'progressbar', 'EchoPipe'] +__all__ = ["finalize", "echo", "confirm", "prompt", "progressbar", "EchoPipe"] diff --git a/pros/common/ui/interactive/ConfirmModal.py b/pros/common/ui/interactive/ConfirmModal.py index d4c59235..f444ec87 100644 --- a/pros/common/ui/interactive/ConfirmModal.py +++ b/pros/common/ui/interactive/ConfirmModal.py @@ -10,8 +10,8 @@ class ConfirmModal(application.Modal[bool]): In --machine-output mode, this Modal is run instead of a textual confirmation request (e.g. click.confirm()) """ - def __init__(self, text: str, abort: bool = False, title: AnyStr = 'Please confirm:', log: Optional[AnyStr] = None): - super().__init__(title, will_abort=abort, confirm_button='Yes', cancel_button='No', description=text) + def __init__(self, text: str, abort: bool = False, title: AnyStr = "Please confirm:", log: Optional[AnyStr] = None): + super().__init__(title, will_abort=abort, confirm_button="Yes", cancel_button="No", description=text) self.log = log def confirm(self): diff --git a/pros/common/ui/interactive/application.py b/pros/common/ui/interactive/application.py index df393471..fb8bd85f 100644 --- a/pros/common/ui/interactive/application.py +++ b/pros/common/ui/interactive/application.py @@ -3,7 +3,7 @@ from .components import Component from .observable import Observable -P = TypeVar('P') +P = TypeVar("P") class Application(Observable, Generic[P]): @@ -22,7 +22,7 @@ def __del__(self): self.exit() def on_exit(self, *handlers: Callable): - return super(Application, self).on('end', *handlers) + return super(Application, self).on("end", *handlers) def exit(self, **kwargs): """ @@ -31,24 +31,24 @@ def exit(self, **kwargs): :arg return: set the return value before triggering exit. 
This value would be the value returned by Renderer.run(Application) """ - if 'return' in kwargs: - self.set_return(kwargs['return']) - self.trigger('end') + if "return" in kwargs: + self.set_return(kwargs["return"]) + self.trigger("end") def on_redraw(self, *handlers: Callable, **kwargs) -> Callable: - return super(Application, self).on('redraw', *handlers, **kwargs) + return super(Application, self).on("redraw", *handlers, **kwargs) def redraw(self) -> None: - self.trigger('redraw') + self.trigger("redraw") def set_return(self, value: P) -> None: """ Set the return value of Renderer.run(Application) """ - self.trigger('return', value) + self.trigger("return", value) def on_return_set(self, *handlers: Callable, **kwargs): - return super(Application, self).on('return', *handlers, **kwargs) + return super(Application, self).on("return", *handlers, **kwargs) @classmethod def get_hierarchy(cls, base: type) -> Optional[List[str]]: @@ -102,8 +102,8 @@ def __init__( title: AnyStr, description: Optional[AnyStr] = None, will_abort: bool = True, - confirm_button: AnyStr = 'Continue', - cancel_button: AnyStr = 'Cancel', + confirm_button: AnyStr = "Continue", + cancel_button: AnyStr = "Cancel", can_confirm: Optional[bool] = None, ): super().__init__() @@ -114,13 +114,13 @@ def __init__( self.cancel_button = cancel_button self._can_confirm = can_confirm - self.on('confirm', self._confirm) + self.on("confirm", self._confirm) def on_cancel(): nonlocal self self.cancel() - self.on('cancel', on_cancel) + self.on("cancel", on_cancel) def confirm(self, *args, **kwargs): raise NotImplementedError() @@ -140,7 +140,7 @@ def build(self) -> Generator[Component, None, None]: def __getstate__(self): extra_state = {} if self.description is not None: - extra_state['description'] = self.description + extra_state["description"] = self.description return dict( **super(Modal, self).__getstate__(), **extra_state, diff --git a/pros/common/ui/interactive/components/__init__.py b/pros/common/ui/interactive/components/__init__.py index abc969a8..419bc371 100644 --- a/pros/common/ui/interactive/components/__init__.py +++ b/pros/common/ui/interactive/components/__init__.py @@ -7,16 +7,16 @@ from .label import Label, Spinner, VerbatimLabel __all__ = [ - 'Component', - 'Button', - 'Container', - 'InputBox', - 'ButtonGroup', - 'DropDownBox', - 'Label', - 'DirectorySelector', - 'FileSelector', - 'Checkbox', - 'Spinner', - 'VerbatimLabel', + "Component", + "Button", + "Container", + "InputBox", + "ButtonGroup", + "DropDownBox", + "Label", + "DirectorySelector", + "FileSelector", + "Checkbox", + "Spinner", + "VerbatimLabel", ] diff --git a/pros/common/ui/interactive/components/button.py b/pros/common/ui/interactive/components/button.py index dee126b5..184b930c 100644 --- a/pros/common/ui/interactive/components/button.py +++ b/pros/common/ui/interactive/components/button.py @@ -14,7 +14,7 @@ def __init__(self, text: AnyStr): self.text = text def on_clicked(self, *handlers: Callable, **kwargs): - return self.on('clicked', *handlers, **kwargs) + return self.on("clicked", *handlers, **kwargs) def __getstate__(self) -> dict: return dict(**super(Button, self).__getstate__(), text=self.text, uuid=self.uuid) diff --git a/pros/common/ui/interactive/components/component.py b/pros/common/ui/interactive/components/component.py index 3880e5a6..500454f1 100644 --- a/pros/common/ui/interactive/components/component.py +++ b/pros/common/ui/interactive/components/component.py @@ -32,7 +32,7 @@ def __getstate__(self) -> Dict: return 
dict(etype=Component.get_hierarchy(self.__class__)) -P = TypeVar('P', bound=Parameter) +P = TypeVar("P", bound=Parameter) class ParameterizedComponent(Component, Generic[P]): @@ -46,10 +46,10 @@ def __init__(self, parameter: P): def __getstate__(self): extra_state = {} if isinstance(self.parameter, ValidatableParameter): - extra_state['valid'] = self.parameter.is_valid() + extra_state["valid"] = self.parameter.is_valid() reason = self.parameter.is_valid_reason() if reason: - extra_state['valid_reason'] = self.parameter.is_valid_reason() + extra_state["valid_reason"] = self.parameter.is_valid_reason() return dict( **super(ParameterizedComponent, self).__getstate__(), **extra_state, diff --git a/pros/common/ui/interactive/components/container.py b/pros/common/ui/interactive/components/container.py index 6f251110..b153c1ac 100644 --- a/pros/common/ui/interactive/components/container.py +++ b/pros/common/ui/interactive/components/container.py @@ -23,11 +23,11 @@ def __init__( self.collapsed = BooleanParameter(collapsed) if isinstance(collapsed, bool) else collapsed def __getstate__(self): - extra_state = {'uuid': self.collapsed.uuid, 'collapsed': self.collapsed.value} + extra_state = {"uuid": self.collapsed.uuid, "collapsed": self.collapsed.value} if self.title is not None: - extra_state['title'] = self.title + extra_state["title"] = self.title if self.description is not None: - extra_state['description'] = self.description + extra_state["description"] = self.description return dict( **super(Container, self).__getstate__(), **extra_state, elements=[e.__getstate__() for e in self.elements] ) diff --git a/pros/common/ui/interactive/components/input.py b/pros/common/ui/interactive/components/input.py index 8d35b5e8..8a9a071a 100644 --- a/pros/common/ui/interactive/components/input.py +++ b/pros/common/ui/interactive/components/input.py @@ -15,7 +15,7 @@ def __init__(self, label: AnyStr, parameter: P, placeholder: Optional = None): def __getstate__(self) -> dict: extra_state = {} if self.placeholder is not None: - extra_state['placeholder'] = self.placeholder + extra_state["placeholder"] = self.placeholder return dict( **super(InputBox, self).__getstate__(), **extra_state, diff --git a/pros/common/ui/interactive/components/label.py b/pros/common/ui/interactive/components/label.py index f4ac5592..df06ec95 100644 --- a/pros/common/ui/interactive/components/label.py +++ b/pros/common/ui/interactive/components/label.py @@ -25,4 +25,4 @@ class Spinner(Label): """ def __init__(self): - super(Spinner, self).__init__('Loading...') + super(Spinner, self).__init__("Loading...") diff --git a/pros/common/ui/interactive/observable.py b/pros/common/ui/interactive/observable.py index f2d14d92..61e00178 100644 --- a/pros/common/ui/interactive/observable.py +++ b/pros/common/ui/interactive/observable.py @@ -25,7 +25,7 @@ def notify(cls, uuid, event, *args, **kwargs): if uuid in _uuid_table: _uuid_table[uuid].trigger(event, *args, **kwargs) else: - logger(__name__).warning(f'Could not find an Observable to notify with UUID: {uuid}', sentry=True) + logger(__name__).warning(f"Could not find an Observable to notify with UUID: {uuid}", sentry=True) def on( self, diff --git a/pros/common/ui/interactive/parameters/__init__.py b/pros/common/ui/interactive/parameters/__init__.py index 8d397694..9185027b 100644 --- a/pros/common/ui/interactive/parameters/__init__.py +++ b/pros/common/ui/interactive/parameters/__init__.py @@ -3,10 +3,10 @@ from .validatable_parameter import AlwaysInvalidParameter, ValidatableParameter 
__all__ = [ - 'Parameter', - 'OptionParameter', - 'BooleanParameter', - 'ValidatableParameter', - 'RangeParameter', - 'AlwaysInvalidParameter', + "Parameter", + "OptionParameter", + "BooleanParameter", + "ValidatableParameter", + "RangeParameter", + "AlwaysInvalidParameter", ] diff --git a/pros/common/ui/interactive/parameters/misc_parameters.py b/pros/common/ui/interactive/parameters/misc_parameters.py index f19edba9..64cc2a38 100644 --- a/pros/common/ui/interactive/parameters/misc_parameters.py +++ b/pros/common/ui/interactive/parameters/misc_parameters.py @@ -3,7 +3,7 @@ from pros.common.ui.interactive.parameters.parameter import Parameter from pros.common.ui.interactive.parameters.validatable_parameter import ValidatableParameter -T = TypeVar('T') +T = TypeVar("T") class OptionParameter(ValidatableParameter, Generic[T]): @@ -17,8 +17,8 @@ def validate(self, value: Any): class BooleanParameter(Parameter[bool]): def update(self, new_value): - true_prefixes = ['T', 'Y'] - true_matches = ['1'] + true_prefixes = ["T", "Y"] + true_matches = ["1"] v = str(new_value).upper() is_true = v in true_matches or any(v.startswith(p) for p in true_prefixes) super(BooleanParameter, self).update(is_true) @@ -33,7 +33,7 @@ def validate(self, value: T): if self.range[0] <= value <= self.range[1]: return True else: - return f'{value} is not within [{self.range[0]}, {self.range[1]}]' + return f"{value} is not within [{self.range[0]}, {self.range[1]}]" def update(self, new_value): super(RangeParameter, self).update(int(new_value)) diff --git a/pros/common/ui/interactive/parameters/parameter.py b/pros/common/ui/interactive/parameters/parameter.py index 1c11eb5e..c1412e9a 100644 --- a/pros/common/ui/interactive/parameters/parameter.py +++ b/pros/common/ui/interactive/parameters/parameter.py @@ -2,7 +2,7 @@ from pros.common.ui.interactive.observable import Observable -T = TypeVar('T') +T = TypeVar("T") class Parameter(Observable, Generic[T]): @@ -17,11 +17,11 @@ def __init__(self, initial_value: T): super().__init__() self.value = initial_value - self.on('update', self.update) + self.on("update", self.update) def update(self, new_value): self.value = new_value - self.trigger('changed', self) + self.trigger("changed", self) def on_changed(self, *handlers: Callable, **kwargs): - return self.on('changed', *handlers, **kwargs) + return self.on("changed", *handlers, **kwargs) diff --git a/pros/common/ui/interactive/parameters/validatable_parameter.py b/pros/common/ui/interactive/parameters/validatable_parameter.py index 208c3bb3..e631a045 100644 --- a/pros/common/ui/interactive/parameters/validatable_parameter.py +++ b/pros/common/ui/interactive/parameters/validatable_parameter.py @@ -2,7 +2,7 @@ from pros.common.ui.interactive.parameters.parameter import Parameter -T = TypeVar('T') +T = TypeVar("T") class ValidatableParameter(Parameter, Generic[T]): @@ -44,19 +44,19 @@ def update(self, new_value): if self.allow_invalid_input or self.is_valid(new_value): super(ValidatableParameter, self).update(new_value) if self.is_valid(): - self.trigger('changed_validated', self) + self.trigger("changed_validated", self) def on_changed(self, *handlers: Callable, **kwargs): """ Subscribe to event whenever value validly changes """ - return self.on('changed_validated', *handlers, **kwargs) + return self.on("changed_validated", *handlers, **kwargs) def on_any_changed(self, *handlers: Callable, **kwargs): """ Subscribe to event whenever value changes (regardless of whether or not new value is valid) """ - return self.on('changed', 
*handlers, **kwargs) + return self.on("changed", *handlers, **kwargs) class AlwaysInvalidParameter(ValidatableParameter[T], Generic[T]): diff --git a/pros/common/ui/interactive/renderers/MachineOutputRenderer.py b/pros/common/ui/interactive/renderers/MachineOutputRenderer.py index 91f88c8e..5b348617 100644 --- a/pros/common/ui/interactive/renderers/MachineOutputRenderer.py +++ b/pros/common/ui/interactive/renderers/MachineOutputRenderer.py @@ -10,32 +10,32 @@ from ..application import Application from .Renderer import Renderer -current: List['MachineOutputRenderer'] = [] +current: List["MachineOutputRenderer"] = [] -def _push_renderer(renderer: 'MachineOutputRenderer'): +def _push_renderer(renderer: "MachineOutputRenderer"): global current - stack: List['MachineOutputRenderer'] = current + stack: List["MachineOutputRenderer"] = current stack.append(renderer) -def _remove_renderer(renderer: 'MachineOutputRenderer'): +def _remove_renderer(renderer: "MachineOutputRenderer"): global current - stack: List['MachineOutputRenderer'] = current + stack: List["MachineOutputRenderer"] = current if renderer in stack: stack.remove(renderer) -def _current_renderer() -> Optional['MachineOutputRenderer']: +def _current_renderer() -> Optional["MachineOutputRenderer"]: global current - stack: List['MachineOutputRenderer'] = current + stack: List["MachineOutputRenderer"] = current return stack[-1] if len(stack) > 0 else None -P = TypeVar('P') +P = TypeVar("P") class MachineOutputRenderer(Renderer[P], Generic[P]): @@ -55,7 +55,7 @@ def on_redraw(): @staticmethod def get_line(): - line = click.get_text_stream('stdin').readline().strip() + line = click.get_text_stream("stdin").readline().strip() return line.strip() if line is not None else None def run(self) -> P: @@ -73,8 +73,8 @@ def run(self) -> P: try: value = json.loads(line) - if 'uuid' in value and 'event' in value: - Observable.notify(value['uuid'], value['event'], *value.get('args', []), **value.get('kwargs', {})) + if "uuid" in value and "event" in value: + Observable.notify(value["uuid"], value["event"], *value.get("args", []), **value.get("kwargs", {})) except json.JSONDecodeError as e: ui.logger(__name__).exception(e) except BaseException as e: @@ -85,16 +85,16 @@ def run(self) -> P: return self.run_rv def stop(self): - ui.logger(__name__).debug(f'Stopping {self.app}') + ui.logger(__name__).debug(f"Stopping {self.app}") self.alive = False if current_thread() != self.thread: - ui.logger(__name__).debug(f'Interrupting render thread of {self.app}') + ui.logger(__name__).debug(f"Interrupting render thread of {self.app}") while not self.stop_sem.acquire(timeout=0.1): self.wake_me() - ui.logger(__name__).debug(f'Broadcasting stop {self.app}') - self._output({'uuid': self.app.uuid, 'should_exit': True}) + ui.logger(__name__).debug(f"Broadcasting stop {self.app}") + self._output({"uuid": self.app.uuid, "should_exit": True}) _remove_renderer(self) top_renderer = _current_renderer() @@ -105,15 +105,15 @@ def wake_me(self): """ Hack to wake up input thread to know to shut down """ - ui.logger(__name__).debug(f'Broadcasting WAKEME for {self.app}') + ui.logger(__name__).debug(f"Broadcasting WAKEME for {self.app}") if ui.ismachineoutput(): - ui._machineoutput({'type': 'wakeme'}) + ui._machineoutput({"type": "wakeme"}) else: - ui.echo('Wake up the renderer!') + ui.echo("Wake up the renderer!") @staticmethod def _output(data: dict): - data['type'] = 'input/interactive' + data["type"] = "input/interactive" if ui.ismachineoutput(): ui._machineoutput(data) else: diff 
--git a/pros/common/ui/interactive/renderers/Renderer.py b/pros/common/ui/interactive/renderers/Renderer.py index 40f17a0e..2bbebf2a 100644 --- a/pros/common/ui/interactive/renderers/Renderer.py +++ b/pros/common/ui/interactive/renderers/Renderer.py @@ -2,7 +2,7 @@ from ..application import Application -P = TypeVar('P') +P = TypeVar("P") class Renderer(Generic[P]): diff --git a/pros/common/ui/log.py b/pros/common/ui/log.py index bc37e0ad..05ec8a85 100644 --- a/pros/common/ui/log.py +++ b/pros/common/ui/log.py @@ -21,18 +21,18 @@ def __init__(self, *args, ctx_obj=None, **kwargs): def emit(self, record): try: - if self.ctx_obj.get('machine_output', False): + if self.ctx_obj.get("machine_output", False): formatter = self.formatter or logging.Formatter() record.message = record.getMessage() obj = { - 'type': 'log/message', - 'level': record.levelname, - 'message': formatter.formatMessage(record), - 'simpleMessage': record.message, + "type": "log/message", + "level": record.levelname, + "message": formatter.formatMessage(record), + "simpleMessage": record.message, } if record.exc_info: - obj['trace'] = formatter.formatException(record.exc_info) - msg = f'Uc&42BWAaQ{jsonpickle.dumps(obj, unpicklable=False, backend=_machine_pickler)}' + obj["trace"] = formatter.formatException(record.exc_info) + msg = f"Uc&42BWAaQ{jsonpickle.dumps(obj, unpicklable=False, backend=_machine_pickler)}" else: msg = self.format(record) click.echo(msg) @@ -47,6 +47,6 @@ class PROSLogFormatter(logging.Formatter): def formatException(self, ei): if not isdebug(): - return '\n'.join(super().formatException(ei).split('\n')[-3:]) + return "\n".join(super().formatException(ei).split("\n")[-3:]) else: return super().formatException(ei) diff --git a/pros/common/utils.py b/pros/common/utils.py index 56e0f08c..2c771846 100644 --- a/pros/common/utils.py +++ b/pros/common/utils.py @@ -13,13 +13,13 @@ @lru_cache(1) def get_version(): try: - ver = open(os.path.join(os.path.dirname(__file__), '..', '..', 'version')).read().strip() + ver = open(os.path.join(os.path.dirname(__file__), "..", "..", "version")).read().strip() if ver is not None: return ver except: pass try: - if getattr(sys, 'frozen', False): + if getattr(sys, "frozen", False): import _constants ver = _constants.CLI_VERSION @@ -36,13 +36,13 @@ def get_version(): module = pros.cli.main.__name__ for dist in pkg_resources.working_set: - scripts = dist.get_entry_map().get('console_scripts') or {} + scripts = dist.get_entry_map().get("console_scripts") or {} for _, entry_point in iter(scripts.items()): if entry_point.module_name == module: ver = dist.version if ver is not None: return ver - raise RuntimeError('Could not determine version') + raise RuntimeError("Could not determine version") def retries(func, retry: int = 3): @@ -79,13 +79,13 @@ def ismachineoutput(ctx: click.Context = None) -> bool: if isinstance(ctx, click.Context): ctx.ensure_object(dict) assert isinstance(ctx.obj, dict) - return ctx.obj.get('machine_output', False) + return ctx.obj.get("machine_output", False) else: return False def get_pros_dir(): - return click.get_app_dir('PROS') + return click.get_app_dir("PROS") def with_click_context(func): @@ -121,9 +121,9 @@ def download_file(url: str, ext: Optional[str] = None, desc: Optional[str] = Non response = requests.get(url, stream=True) if response.status_code == 200: - filename: str = url.rsplit('/', 1)[-1] - if 'Content-Disposition' in response.headers.keys(): - filename = re.findall("filename=(.+)", response.headers['Content-Disposition'])[0] + filename: 
str = url.rsplit("/", 1)[-1] + if "Content-Disposition" in response.headers.keys(): + filename = re.findall("filename=(.+)", response.headers["Content-Disposition"])[0] # try: # disposition = parse_requests_response(response) # if isinstance(ext, str): @@ -132,16 +132,16 @@ def download_file(url: str, ext: Optional[str] = None, desc: Optional[str] = Non # filename = disposition.filename_unsafe # except RuntimeError: # pass - output_path = os.path.join(get_pros_dir(), 'download', filename) + output_path = os.path.join(get_pros_dir(), "download", filename) if os.path.exists(output_path): os.remove(output_path) elif not os.path.exists(os.path.dirname(output_path)): os.makedirs(os.path.dirname(output_path), exist_ok=True) - with open(output_path, mode='wb') as file: + with open(output_path, mode="wb") as file: with progressbar( - length=int(response.headers['Content-Length']), label=desc or f'Downloading {filename}' + length=int(response.headers["Content-Length"]), label=desc or f"Downloading {filename}" ) as pb: for chunk in response.iter_content(256): file.write(chunk) diff --git a/pros/conductor/__init__.py b/pros/conductor/__init__.py index 51ac1e34..e866d1b1 100644 --- a/pros/conductor/__init__.py +++ b/pros/conductor/__init__.py @@ -1,4 +1,4 @@ -__all__ = ['BaseTemplate', 'Template', 'LocalTemplate', 'Depot', 'LocalDepot', 'Project', 'Conductor'] +__all__ = ["BaseTemplate", "Template", "LocalTemplate", "Depot", "LocalDepot", "Project", "Conductor"] from .conductor import Conductor from .depots import Depot, LocalDepot diff --git a/pros/conductor/conductor.py b/pros/conductor/conductor.py index 021e981d..5826cc2a 100644 --- a/pros/conductor/conductor.py +++ b/pros/conductor/conductor.py @@ -16,10 +16,10 @@ from .project import Project from .templates import BaseTemplate, ExternalTemplate, LocalTemplate, Template -MAINLINE_NAME = 'pros-mainline' -MAINLINE_URL = 'https://pros.cs.purdue.edu/v5/_static/releases/pros-mainline.json' -EARLY_ACCESS_NAME = 'kernel-early-access-mainline' -EARLY_ACCESS_URL = 'https://pros.cs.purdue.edu/v5/_static/beta/beta-pros-mainline.json' +MAINLINE_NAME = "pros-mainline" +MAINLINE_URL = "https://pros.cs.purdue.edu/v5/_static/releases/pros-mainline.json" +EARLY_ACCESS_NAME = "kernel-early-access-mainline" +EARLY_ACCESS_URL = "https://pros.cs.purdue.edu/v5/_static/beta/beta-pros-mainline.json" """ # TBD? 
Currently, EarlyAccess value is stored in config file @@ -36,11 +36,11 @@ class Conductor(Config): def __init__(self, file=None): if not file: - file = os.path.join(click.get_app_dir('PROS'), 'conductor.pros') + file = os.path.join(click.get_app_dir("PROS"), "conductor.pros") self.local_templates: Set[LocalTemplate] = set() self.early_access_local_templates: Set[LocalTemplate] = set() self.depots: Dict[str, Depot] = {} - self.default_target: str = 'v5' + self.default_target: str = "v5" self.default_libraries: Dict[str, List[str]] = None self.early_access_libraries: Dict[str, List[str]] = None self.use_early_access = False @@ -63,25 +63,25 @@ def __init__(self, file=None): self.depots[EARLY_ACCESS_NAME] = HttpDepot(EARLY_ACCESS_NAME, EARLY_ACCESS_URL) needs_saving = True if self.default_target is None: - self.default_target = 'v5' + self.default_target = "v5" needs_saving = True if self.default_libraries is None: - self.default_libraries = {'v5': ['okapilib'], 'cortex': []} + self.default_libraries = {"v5": ["okapilib"], "cortex": []} needs_saving = True - if self.early_access_libraries is None or len(self.early_access_libraries['v5']) != 2: - self.early_access_libraries = {'v5': ['liblvgl', 'okapilib'], 'cortex': []} + if self.early_access_libraries is None or len(self.early_access_libraries["v5"]) != 2: + self.early_access_libraries = {"v5": ["liblvgl", "okapilib"], "cortex": []} needs_saving = True - if 'v5' not in self.default_libraries: - self.default_libraries['v5'] = [] + if "v5" not in self.default_libraries: + self.default_libraries["v5"] = [] needs_saving = True - if 'cortex' not in self.default_libraries: - self.default_libraries['cortex'] = [] + if "cortex" not in self.default_libraries: + self.default_libraries["cortex"] = [] needs_saving = True - if 'v5' not in self.early_access_libraries: - self.early_access_libraries['v5'] = [] + if "v5" not in self.early_access_libraries: + self.early_access_libraries["v5"] = [] needs_saving = True - if 'cortex' not in self.early_access_libraries: - self.early_access_libraries['cortex'] = [] + if "cortex" not in self.early_access_libraries: + self.early_access_libraries["cortex"] = [] needs_saving = True if needs_saving: self.save() @@ -97,18 +97,18 @@ def fetch_template(self, depot: Depot, template: BaseTemplate, **kwargs) -> Loca if t.identifier == template.identifier: self.purge_template(t) - if 'destination' in kwargs: # this is deprecated, will work (maybe) but not desirable behavior - destination = kwargs.pop('destination') + if "destination" in kwargs: # this is deprecated, will work (maybe) but not desirable behavior + destination = kwargs.pop("destination") else: - destination = os.path.join(self.directory, 'templates', template.identifier) + destination = os.path.join(self.directory, "templates", template.identifier) if os.path.isdir(destination): shutil.rmtree(destination) template: Template = depot.fetch_template(template, destination, **kwargs) - click.secho(f'Fetched {template.identifier} from {depot.name} depot', dim=True) + click.secho(f"Fetched {template.identifier} from {depot.name} depot", dim=True) local_template = LocalTemplate(orig=template, location=destination) - local_template.metadata['origin'] = depot.name - click.echo(f'Adding {local_template.identifier} to registry...', nl=False) + local_template.metadata["origin"] = depot.name + click.echo(f"Adding {local_template.identifier} to registry...", nl=False) if depot.name == EARLY_ACCESS_NAME: # check for early access 
self.early_access_local_templates.add(local_template) else: @@ -116,11 +116,11 @@ def fetch_template(self, depot: Depot, template: BaseTemplate, **kwargs) -> Loca self.save() if isinstance(template, ExternalTemplate) and template.directory == destination: template.delete() - click.secho('Done', fg='green') + click.secho("Done", fg="green") return local_template def purge_template(self, template: LocalTemplate): - if template.metadata['origin'] == EARLY_ACCESS_NAME: + if template.metadata["origin"] == EARLY_ACCESS_NAME: if template not in self.early_access_local_templates: logger(__name__).info( f"{template.identifier} was not in the Conductor's local early access templates cache." @@ -134,7 +134,7 @@ def purge_template(self, template: LocalTemplate): self.local_templates.remove(template) if os.path.abspath(template.location).startswith( - os.path.abspath(os.path.join(self.directory, 'templates')) + os.path.abspath(os.path.join(self.directory, "templates")) ) and os.path.isdir(template.location): shutil.rmtree(template.location) self.save() @@ -149,9 +149,9 @@ def resolve_templates( **kwargs, ) -> List[BaseTemplate]: results = list() if not unique else set() - kernel_version = kwargs.get('kernel_version', None) - if kwargs.get('early_access', None) is not None: - self.use_early_access = kwargs.get('early_access', False) + kernel_version = kwargs.get("kernel_version", None) + if kwargs.get("early_access", None) is not None: + self.use_early_access = kwargs.get("early_access", False) if isinstance(identifier, str): query = BaseTemplate.create_query(name=identifier, **kwargs) else: @@ -187,34 +187,34 @@ def resolve_templates( results.update(online_results) else: results.extend(online_results) - logger(__name__).debug('Saving Conductor config after checking for remote updates') + logger(__name__).debug("Saving Conductor config after checking for remote updates") self.save() # Save self since there may have been some updates from the depots if len(results) == 0 and not self.use_early_access: raise dont_send( - InvalidTemplateException(f'{identifier.name} does not support kernel version {kernel_version}') + InvalidTemplateException(f"{identifier.name} does not support kernel version {kernel_version}") ) return list(results) def resolve_template(self, identifier: Union[str, BaseTemplate], **kwargs) -> Optional[BaseTemplate]: if isinstance(identifier, str): - kwargs['name'] = identifier + kwargs["name"] = identifier elif isinstance(identifier, BaseTemplate): - kwargs['orig'] = identifier + kwargs["orig"] = identifier query = BaseTemplate.create_query(**kwargs) - logger(__name__).info(f'Query: {query}') + logger(__name__).info(f"Query: {query}") logger(__name__).debug(query.__dict__) templates = self.resolve_templates(query, **kwargs) logger(__name__).info(f'Candidates: {", ".join([str(t) for t in templates])}') if not any(templates): return None - query.version = str(Spec(query.version or '>0').select([Version(t.version) for t in templates])) + query.version = str(Spec(query.version or ">0").select([Version(t.version) for t in templates])) v = Version(query.version) - v.prerelease = v.prerelease if len(v.prerelease) else ('',) - v.build = v.build if len(v.build) else ('',) - query.version = f'=={v}' - logger(__name__).info(f'Resolved to {query.identifier}') + v.prerelease = v.prerelease if len(v.prerelease) else ("",) + v.build = v.build if len(v.build) else ("",) + query.version = f"=={v}" + logger(__name__).info(f"Resolved to {query.identifier}") templates = self.resolve_templates(query, 
**kwargs) if not any(templates): return None @@ -224,11 +224,11 @@ def resolve_template(self, identifier: Union[str, BaseTemplate], **kwargs) -> Op # there's a local template satisfying the query if len(local_templates) > 1: # This should never happen! Conductor state must be invalid - raise Exception(f'Multiple local templates satisfy {query.identifier}!') + raise Exception(f"Multiple local templates satisfy {query.identifier}!") return local_templates[0] # prefer pros-mainline template second - mainline_templates = [t for t in templates if t.metadata['origin'] == 'pros-mainline'] + mainline_templates = [t for t in templates if t.metadata["origin"] == "pros-mainline"] if any(mainline_templates): return mainline_templates[0] @@ -236,52 +236,52 @@ def resolve_template(self, identifier: Union[str, BaseTemplate], **kwargs) -> Op return templates[0] def apply_template(self, project: Project, identifier: Union[str, BaseTemplate], **kwargs): - upgrade_ok = kwargs.get('upgrade_ok', True) - install_ok = kwargs.get('install_ok', True) - downgrade_ok = kwargs.get('downgrade_ok', True) - download_ok = kwargs.get('download_ok', True) - force = kwargs.get('force_apply', False) - - kwargs['target'] = project.target - if 'kernel' in project.templates: + upgrade_ok = kwargs.get("upgrade_ok", True) + install_ok = kwargs.get("install_ok", True) + downgrade_ok = kwargs.get("downgrade_ok", True) + download_ok = kwargs.get("download_ok", True) + force = kwargs.get("force_apply", False) + + kwargs["target"] = project.target + if "kernel" in project.templates: # support_kernels for backwards compatibility, but kernel_version should be getting most of the exposure - kwargs['kernel_version'] = kwargs['supported_kernels'] = project.templates['kernel'].version + kwargs["kernel_version"] = kwargs["supported_kernels"] = project.templates["kernel"].version template = self.resolve_template(identifier=identifier, allow_online=download_ok, **kwargs) if template is None: raise dont_send( - InvalidTemplateException(f'Could not find a template satisfying {identifier} for {project.target}') + InvalidTemplateException(f"Could not find a template satisfying {identifier} for {project.target}") ) # warn and prompt user if upgrading to PROS 4 or downgrading to PROS 3 - if template.name == 'kernel': + if template.name == "kernel": isProject = Project.find_project("") if isProject: curr_proj = Project() if curr_proj.kernel: - if template.version[0] == '4' and curr_proj.kernel[0] == '3': + if template.version[0] == "4" and curr_proj.kernel[0] == "3": confirm = ui.confirm( - f'Warning! Upgrading project to PROS 4 will cause breaking changes. ' - f'Do you still want to upgrade?' + f"Warning! Upgrading project to PROS 4 will cause breaking changes. " + f"Do you still want to upgrade?" ) if not confirm: - raise dont_send(InvalidTemplateException(f'Not upgrading')) - if template.version[0] == '3' and curr_proj.kernel[0] == '4': + raise dont_send(InvalidTemplateException(f"Not upgrading")) + if template.version[0] == "3" and curr_proj.kernel[0] == "4": confirm = ui.confirm( - f'Warning! Downgrading project to PROS 3 will cause breaking changes. ' - f'Do you still want to downgrade?' + f"Warning! Downgrading project to PROS 3 will cause breaking changes. " + f"Do you still want to downgrade?" 
) if not confirm: - raise dont_send(InvalidTemplateException(f'Not downgrading')) - elif not self.use_early_access and template.version[0] == '3' and not self.warn_early_access: + raise dont_send(InvalidTemplateException(f"Not downgrading")) + elif not self.use_early_access and template.version[0] == "3" and not self.warn_early_access: confirm = ui.confirm( - f'PROS 4 is now in early access. ' - f'Please use the --early-access flag if you would like to use it.\n' - f'Do you want to use PROS 4 instead?' + f"PROS 4 is now in early access. " + f"Please use the --early-access flag if you would like to use it.\n" + f"Do you want to use PROS 4 instead?" ) self.warn_early_access = True if confirm: # use pros 4 self.use_early_access = True - kwargs['version'] = '>=0' + kwargs["version"] = ">=0" self.save() # Recall the function with early access enabled return self.apply_template(project, identifier, **kwargs) @@ -289,14 +289,14 @@ def apply_template(self, project: Project, identifier: Union[str, BaseTemplate], self.save() if not isinstance(template, LocalTemplate): with ui.Notification(): - template = self.fetch_template(self.get_depot(template.metadata['origin']), template, **kwargs) + template = self.fetch_template(self.get_depot(template.metadata["origin"]), template, **kwargs) assert isinstance(template, LocalTemplate) logger(__name__).info(str(project)) valid_action = project.get_template_actions(template) if valid_action == TemplateAction.NotApplicable: raise dont_send( - InvalidTemplateException(f'{template.identifier} is not applicable to {project}', reason=valid_action) + InvalidTemplateException(f"{template.identifier} is not applicable to {project}", reason=valid_action) ) if ( force @@ -306,21 +306,21 @@ def apply_template(self, project: Project, identifier: Union[str, BaseTemplate], ): project.apply_template( template, - force_system=kwargs.pop('force_system', False), - force_user=kwargs.pop('force_user', False), - remove_empty_directories=kwargs.pop('remove_empty_directories', False), + force_system=kwargs.pop("force_system", False), + force_user=kwargs.pop("force_user", False), + remove_empty_directories=kwargs.pop("remove_empty_directories", False), ) - ui.finalize('apply', f'Finished applying {template.identifier} to {project.location}') + ui.finalize("apply", f"Finished applying {template.identifier} to {project.location}") elif valid_action != TemplateAction.AlreadyInstalled: raise dont_send( InvalidTemplateException( - f'Could not install {template.identifier} because it is {valid_action.name},' - f' and that is not allowed.', + f"Could not install {template.identifier} because it is {valid_action.name}," + f" and that is not allowed.", reason=valid_action, ) ) else: - ui.finalize('apply', f'{template.identifier} is already installed in {project.location}') + ui.finalize("apply", f"{template.identifier} is already installed in {project.location}") @staticmethod def remove_template( @@ -329,72 +329,72 @@ def remove_template( remove_user: bool = True, remove_empty_directories: bool = True, ): - ui.logger(__name__).debug(f'Uninstalling templates matching {identifier}') + ui.logger(__name__).debug(f"Uninstalling templates matching {identifier}") if not project.resolve_template(identifier): ui.echo(f"{identifier} is not an applicable template") for template in project.resolve_template(identifier): - ui.echo(f'Uninstalling {template.identifier}') + ui.echo(f"Uninstalling {template.identifier}") project.remove_template( template, remove_user=remove_user, 
remove_empty_directories=remove_empty_directories ) def new_project(self, path: str, no_default_libs: bool = False, **kwargs) -> Project: - if kwargs.get('early_access', None) is not None: - self.use_early_access = kwargs.get('early_access', False) + if kwargs.get("early_access", None) is not None: + self.use_early_access = kwargs.get("early_access", False) if kwargs["version_source"]: # If true, then the user has not specified a version if not self.use_early_access and self.warn_early_access: ui.echo(f"PROS 4 is now in early access. " f"If you would like to use it, use the --early-access flag.") elif self.use_early_access: - ui.echo(f'Early access is enabled. Using PROS 4.') + ui.echo(f"Early access is enabled. Using PROS 4.") elif self.use_early_access: - ui.echo(f'Early access is enabled.') + ui.echo(f"Early access is enabled.") - if Path(path).exists() and Path(path).samefile(os.path.expanduser('~')): - raise dont_send(ValueError('Will not create a project in user home directory')) + if Path(path).exists() and Path(path).samefile(os.path.expanduser("~")): + raise dont_send(ValueError("Will not create a project in user home directory")) for char in str(Path(path)): if ( char in [ - '?', - '<', - '>', - '*', - '|', - '^', - '#', - '%', - '&', - '$', - '+', - '!', - '`', - '\'', - '=', - '@', - '\'', - '{', - '}', - '[', - ']', - '(', - ')', - '~', + "?", + "<", + ">", + "*", + "|", + "^", + "#", + "%", + "&", + "$", + "+", + "!", + "`", + "'", + "=", + "@", + "'", + "{", + "}", + "[", + "]", + "(", + ")", + "~", ] or ord(char) > 127 ): - raise dont_send(ValueError(f'Invalid character found in directory name: \'{char}\'')) + raise dont_send(ValueError(f"Invalid character found in directory name: '{char}'")) proj = Project(path=path, create=True) - if 'target' in kwargs: - proj.target = kwargs['target'] - if 'project_name' in kwargs and kwargs['project_name'] and not kwargs['project_name'].isspace(): - proj.project_name = kwargs['project_name'] + if "target" in kwargs: + proj.target = kwargs["target"] + if "project_name" in kwargs and kwargs["project_name"] and not kwargs["project_name"].isspace(): + proj.project_name = kwargs["project_name"] else: proj.project_name = os.path.basename(os.path.normpath(os.path.abspath(path))) - if 'version' in kwargs: - if kwargs['version'] == 'latest': - kwargs['version'] = '>=0' - self.apply_template(proj, identifier='kernel', **kwargs) + if "version" in kwargs: + if kwargs["version"] == "latest": + kwargs["version"] = ">=0" + self.apply_template(proj, identifier="kernel", **kwargs) proj.save() if not no_default_libs: @@ -407,8 +407,8 @@ def new_project(self, path: str, no_default_libs: bool = False, **kwargs) -> Pro for library in libraries[proj.target]: try: # remove kernel version so that latest template satisfying query is correctly selected - if 'version' in kwargs: - kwargs.pop('version') + if "version" in kwargs: + kwargs.pop("version") self.apply_template(proj, library, **kwargs) except Exception as e: logger(__name__).exception(e) @@ -423,4 +423,4 @@ def remove_depot(self, name: str): self.save() def query_depots(self, url: bool): - return [name + ((' -- ' + depot.location) if url else '') for name, depot in self.depots.items()] + return [name + ((" -- " + depot.location) if url else "") for name, depot in self.depots.items()] diff --git a/pros/conductor/depots/depot.py b/pros/conductor/depots/depot.py index a0787f43..33b349f2 100644 --- a/pros/conductor/depots/depot.py +++ b/pros/conductor/depots/depot.py @@ -33,16 +33,16 @@ def 
fetch_template(self, template: BaseTemplate, destination: str, **kwargs) -> def get_remote_templates(self, auto_check_freq: Optional[timedelta] = None, force_check: bool = False, **kwargs): if auto_check_freq is None: - auto_check_freq = getattr(self, 'update_frequency', cli_config().update_frequency) + auto_check_freq = getattr(self, "update_frequency", cli_config().update_frequency) logger(__name__).info( - f'Last check of {self.name} was {self.last_remote_update} ' - f'({datetime.now() - self.last_remote_update} vs {auto_check_freq}).' + f"Last check of {self.name} was {self.last_remote_update} " + f"({datetime.now() - self.last_remote_update} vs {auto_check_freq})." ) if force_check or datetime.now() - self.last_remote_update > auto_check_freq: with ui.Notification(): - ui.echo(f'Updating {self.name}... ', nl=False) + ui.echo(f"Updating {self.name}... ", nl=False) self.update_remote_templates(**kwargs) - ui.echo('Done', color='green') + ui.echo("Done", color="green") for t in self.remote_templates: - t.metadata['origin'] = self.name + t.metadata["origin"] = self.name return self.remote_templates diff --git a/pros/conductor/depots/http_depot.py b/pros/conductor/depots/http_depot.py index 652b8c6e..feda9472 100644 --- a/pros/conductor/depots/http_depot.py +++ b/pros/conductor/depots/http_depot.py @@ -21,18 +21,18 @@ def __init__(self, name: str, location: str): def fetch_template(self, template: BaseTemplate, destination: str, **kwargs): import requests - assert 'location' in template.metadata - url = template.metadata['location'] - tf = download_file(url, ext='zip', desc=f'Downloading {template.identifier}') + assert "location" in template.metadata + url = template.metadata["location"] + tf = download_file(url, ext="zip", desc=f"Downloading {template.identifier}") if tf is None: - raise requests.ConnectionError(f'Could not obtain {url}') + raise requests.ConnectionError(f"Could not obtain {url}") with zipfile.ZipFile(tf) as zf: - with ui.progressbar(length=len(zf.namelist()), label=f'Extracting {template.identifier}') as pb: + with ui.progressbar(length=len(zf.namelist()), label=f"Extracting {template.identifier}") as pb: for file in zf.namelist(): zf.extract(file, path=destination) pb.update(1) os.remove(tf) - return ExternalTemplate(file=os.path.join(destination, 'template.pros')) + return ExternalTemplate(file=os.path.join(destination, "template.pros")) def update_remote_templates(self, **_): import requests @@ -41,5 +41,5 @@ def update_remote_templates(self, **_): if response.status_code == 200: self.remote_templates = jsonpickle.decode(response.text) else: - logger(__name__).warning(f'Unable to access {self.name} ({self.location}): {response.status_code}') + logger(__name__).warning(f"Unable to access {self.name} ({self.location}): {response.status_code}") self.last_remote_update = datetime.now() diff --git a/pros/conductor/depots/local_depot.py b/pros/conductor/depots/local_depot.py index 181ed581..0dbdb9a6 100644 --- a/pros/conductor/depots/local_depot.py +++ b/pros/conductor/depots/local_depot.py @@ -13,22 +13,22 @@ class LocalDepot(Depot): def fetch_template(self, template: BaseTemplate, destination: str, **kwargs) -> Template: - if 'location' not in kwargs: + if "location" not in kwargs: logger(__name__).debug(f"Template not specified. 
Provided arguments: {kwargs}") - raise KeyError('Location of local template must be specified.') - location = kwargs['location'] + raise KeyError("Location of local template must be specified.") + location = kwargs["location"] if os.path.isdir(location): location_dir = location - if not os.path.isfile(os.path.join(location_dir, 'template.pros')): - raise ConfigNotFoundException(f'A template.pros file was not found in {location_dir}.') - template_file = os.path.join(location_dir, 'template.pros') + if not os.path.isfile(os.path.join(location_dir, "template.pros")): + raise ConfigNotFoundException(f"A template.pros file was not found in {location_dir}.") + template_file = os.path.join(location_dir, "template.pros") elif zipfile.is_zipfile(location): with zipfile.ZipFile(location) as zf: with click.progressbar(length=len(zf.namelist()), label=f"Extracting {location}") as progress_bar: for file in zf.namelist(): zf.extract(file, path=destination) progress_bar.update(1) - template_file = os.path.join(destination, 'template.pros') + template_file = os.path.join(destination, "template.pros") location_dir = destination elif os.path.isfile(location): location_dir = os.path.dirname(location) @@ -40,7 +40,7 @@ def fetch_template(self, template: BaseTemplate, destination: str, **kwargs) -> raise ValueError(f"The specified location was not a file or directory ({location}).") if location_dir != destination: n_files = len([os.path.join(dp, f) for dp, dn, fn in os.walk(location_dir) for f in fn]) - with click.progressbar(length=n_files, label='Copying to local cache') as pb: + with click.progressbar(length=n_files, label="Copying to local cache") as pb: def my_copy(*args): pb.update(1) @@ -50,4 +50,4 @@ def my_copy(*args): return ExternalTemplate(file=template_file) def __init__(self): - super().__init__('local', 'local') + super().__init__("local", "local") diff --git a/pros/conductor/interactive/NewProjectModal.py b/pros/conductor/interactive/NewProjectModal.py index 17132ff0..552f0b73 100644 --- a/pros/conductor/interactive/NewProjectModal.py +++ b/pros/conductor/interactive/NewProjectModal.py @@ -11,8 +11,8 @@ class NewProjectModal(application.Modal[None]): - targets = parameters.OptionParameter('v5', ['v5', 'cortex']) - kernel_versions = parameters.OptionParameter('latest', ['latest']) + targets = parameters.OptionParameter("v5", ["v5", "cortex"]) + kernel_versions = parameters.OptionParameter("latest", ["latest"]) install_default_libraries = parameters.BooleanParameter(True) project_name = parameters.Parameter(None) @@ -22,9 +22,9 @@ def __init__( self, ctx: Context = None, conductor: Optional[Conductor] = None, - directory=os.path.join(os.path.expanduser('~'), 'My PROS Project'), + directory=os.path.join(os.path.expanduser("~"), "My PROS Project"), ): - super().__init__('Create a new project') + super().__init__("Create a new project") self.conductor = conductor or Conductor() self.click_ctx = ctx or get_current_context() self.directory = NonExistentProjectParameter(directory) @@ -33,11 +33,11 @@ def __init__( cb(self.targets) def target_changed(self, new_target): - templates = self.conductor.resolve_templates('kernel', target=new_target.value) + templates = self.conductor.resolve_templates("kernel", target=new_target.value) if len(templates) == 0: - self.kernel_versions.options = ['latest'] + self.kernel_versions.options = ["latest"] else: - self.kernel_versions.options = ['latest'] + sorted({t.version for t in templates}, reverse=True) + self.kernel_versions.options = ["latest"] + 
sorted({t.version for t in templates}, reverse=True) self.redraw() def confirm(self, *args, **kwargs): @@ -54,10 +54,10 @@ def confirm(self, *args, **kwargs): from pros.conductor.project import ProjectReport report = ProjectReport(project) - ui.finalize('project-report', report) + ui.finalize("project-report", report) with ui.Notification(): - ui.echo('Building project...') + ui.echo("Building project...") project.compile([]) @property @@ -65,15 +65,15 @@ def can_confirm(self): return self.directory.is_valid() and self.targets.is_valid() and self.kernel_versions.is_valid() def build(self) -> Generator[components.Component, None, None]: - yield components.DirectorySelector('Project Directory', self.directory) - yield components.ButtonGroup('Target', self.targets) + yield components.DirectorySelector("Project Directory", self.directory) + yield components.ButtonGroup("Target", self.targets) project_name_placeholder = os.path.basename(os.path.normpath(os.path.abspath(self.directory.value))) yield components.Container( - components.InputBox('Project Name', self.project_name, placeholder=project_name_placeholder), - components.DropDownBox('Kernel Version', self.kernel_versions), - components.Checkbox('Install default libraries', self.install_default_libraries), - title='Advanced', + components.InputBox("Project Name", self.project_name, placeholder=project_name_placeholder), + components.DropDownBox("Kernel Version", self.kernel_versions), + components.Checkbox("Install default libraries", self.install_default_libraries), + title="Advanced", collapsed=self.advanced_collapsed, ) diff --git a/pros/conductor/interactive/UpdateProjectModal.py b/pros/conductor/interactive/UpdateProjectModal.py index fecb6cb8..4e3943cd 100644 --- a/pros/conductor/interactive/UpdateProjectModal.py +++ b/pros/conductor/interactive/UpdateProjectModal.py @@ -46,7 +46,7 @@ def _add_template(self): ui.logger(__name__).debug(options) p = TemplateParameter(None, options) - @p.on('removed') + @p.on("removed") def remove_template(): self.new_templates.remove(p) @@ -55,14 +55,14 @@ def remove_template(): def __init__( self, ctx: Optional[Context] = None, conductor: Optional[Conductor] = None, project: Optional[Project] = None ): - super().__init__('Update a project') + super().__init__("Update a project") self.conductor = conductor or Conductor() self.click_ctx = ctx or get_current_context() self._is_processing = False self.project: Optional[Project] = project self.project_path = ExistingProjectParameter( - str(project.location) if project else os.path.join(os.path.expanduser('~'), 'My PROS Project') + str(project.location) if project else os.path.join(os.path.expanduser("~"), "My PROS Project") ) self.name = parameters.Parameter(None) @@ -74,7 +74,7 @@ def __init__( self.templates_collapsed = parameters.BooleanParameter(False) self.advanced_collapsed = parameters.BooleanParameter(True) - self.add_template_button = components.Button('Add Template') + self.add_template_button = components.Button("Add Template") self.add_template_button.on_clicked(self._add_template) @@ -92,7 +92,7 @@ def project_changed(self, new_project: ExistingProjectParameter): self.current_kernel = TemplateParameter( None, options=sorted( - {t for t in self.conductor.resolve_templates(self.project.templates['kernel'].as_query())}, + {t for t in self.conductor.resolve_templates(self.project.templates["kernel"].as_query())}, key=lambda v: Version(v.version), reverse=True, ), @@ -107,7 +107,7 @@ def project_changed(self, new_project: ExistingProjectParameter): 
), ) for t in self.project.templates.values() - if t.name != 'kernel' + if t.name != "kernel" ] self.new_templates = [] @@ -124,13 +124,13 @@ def can_confirm(self): return self.project and self._generate_transaction().can_execute() def build(self) -> Generator[components.Component, None, None]: - yield components.DirectorySelector('Project Directory', self.project_path) + yield components.DirectorySelector("Project Directory", self.project_path) if self.is_processing: yield components.Spinner() elif self.project_path.is_valid(): assert self.project is not None - yield components.Label(f'Modify your {self.project.target} project.') - yield components.InputBox('Project Name', self.name) + yield components.Label(f"Modify your {self.project.target} project.") + yield components.InputBox("Project Name", self.name) yield TemplateListingComponent(self.current_kernel, editable=dict(version=True), removable=False) yield components.Container( *( @@ -139,12 +139,12 @@ def build(self) -> Generator[components.Component, None, None]: ), *(TemplateListingComponent(t, editable=True, removable=True) for t in self.new_templates), self.add_template_button, - title='Templates', + title="Templates", collapsed=self.templates_collapsed, ) yield components.Container( - components.Checkbox('Re-apply all templates', self.force_apply_parameter), - title='Advanced', + components.Checkbox("Re-apply all templates", self.force_apply_parameter), + title="Advanced", collapsed=self.advanced_collapsed, ) yield components.Label('What will happen when you click "Continue":') diff --git a/pros/conductor/interactive/components.py b/pros/conductor/interactive/components.py index cc848fa5..e44b702a 100644 --- a/pros/conductor/interactive/components.py +++ b/pros/conductor/interactive/components.py @@ -7,23 +7,23 @@ class TemplateListingComponent(components.Container): def _generate_components(self) -> Generator[components.Component, None, None]: - if not self.editable['name'] and not self.editable['version']: + if not self.editable["name"] and not self.editable["version"]: yield components.Label(self.template.value.identifier) else: - if self.editable['name']: - yield components.InputBox('Name', self.template.name) + if self.editable["name"]: + yield components.InputBox("Name", self.template.name) else: yield components.Label(self.template.value.name) - if self.editable['version']: + if self.editable["version"]: if isinstance(self.template.version, parameters.OptionParameter): - yield components.DropDownBox('Version', self.template.version) + yield components.DropDownBox("Version", self.template.version) else: - yield components.InputBox('Version', self.template.version) + yield components.InputBox("Version", self.template.version) else: yield components.Label(self.template.value.version) if self.removable: - remove_button = components.Button('Don\'t remove' if self.template.removed else 'Remove') - remove_button.on_clicked(lambda: self.template.trigger('removed')) + remove_button = components.Button("Don't remove" if self.template.removed else "Remove") + remove_button.on_clicked(lambda: self.template.trigger("removed")) yield remove_button def __init__( diff --git a/pros/conductor/interactive/parameters.py b/pros/conductor/interactive/parameters.py index 9f05b632..486a0719 100644 --- a/pros/conductor/interactive/parameters.py +++ b/pros/conductor/interactive/parameters.py @@ -13,24 +13,24 @@ class NonExistentProjectParameter(p.ValidatableParameter[str]): def validate(self, value: str) -> Union[bool, str]: value = 
os.path.abspath(value) if os.path.isfile(value): - return 'Path is a file' + return "Path is a file" if os.path.isdir(value) and not os.access(value, os.W_OK): - return 'Do not have write permission to path' + return "Do not have write permission to path" if Project.find_project(value) is not None: - return 'Project path already exists, delete it first' + return "Project path already exists, delete it first" blacklisted_directories = [] # TODO: Proper Windows support - if sys.platform == 'win32': + if sys.platform == "win32": blacklisted_directories.extend( [ - os.environ.get('WINDIR', os.path.join('C:', 'Windows')), - os.environ.get('PROGRAMFILES', os.path.join('C:', 'Program Files')), + os.environ.get("WINDIR", os.path.join("C:", "Windows")), + os.environ.get("PROGRAMFILES", os.path.join("C:", "Program Files")), ] ) if any(value.startswith(d) for d in blacklisted_directories): - return 'Cannot create project in a system directory' - if Path(value).exists() and Path(value).samefile(os.path.expanduser('~')): - return 'Should not create a project in home directory' + return "Cannot create project in a system directory" + if Path(value).exists() and Path(value).samefile(os.path.expanduser("~")): + return "Should not create a project in home directory" if not os.path.exists(value): parent = os.path.split(value)[0] while parent and not os.path.exists(parent): @@ -39,13 +39,13 @@ def validate(self, value: str) -> Union[bool, str]: break parent = temp_value if not parent: - return 'Cannot create directory because root does not exist' + return "Cannot create directory because root does not exist" if not os.path.exists(parent): - return f'Cannot create directory because {parent} does not exist' + return f"Cannot create directory because {parent} does not exist" if not os.path.isdir(parent): - return f'Cannot create directory because {parent} is a file' + return f"Cannot create directory because {parent} is a file" if not os.access(parent, os.W_OK | os.X_OK): - return f'Cannot create directory because missing write permissions to {parent}' + return f"Cannot create directory because missing write permissions to {parent}" return True @@ -58,7 +58,7 @@ def update(self, new_value): def validate(self, value: str): project = Project.find_project(value) - return project is not None or 'Path is not inside a PROS project' + return project is not None or "Path is not inside a PROS project" class TemplateParameter(p.ValidatableParameter[BaseTemplate]): @@ -73,29 +73,29 @@ def _update_versions(self): self.version.value = self.version.options[0] self.value = self.options[self.name.value][self.version.value] - self.trigger('changed_validated', self) + self.trigger("changed_validated", self) else: self.version = p.AlwaysInvalidParameter(self.value.version) def __init__(self, template: Optional[BaseTemplate], options: List[BaseTemplate], allow_invalid_input: bool = True): if not template and len(options) == 0: - raise ValueError('At least template or versions must be defined for a TemplateParameter') + raise ValueError("At least template or versions must be defined for a TemplateParameter") self.options = {t.name: {_t.version: _t for _t in options if t.name == _t.name} for t in options} if not template: first_template = list(self.options.values())[0] - template = first_template[str(Spec('>0').select([Version(v) for v in first_template.keys()]))] + template = first_template[str(Spec(">0").select([Version(v) for v in first_template.keys()]))] super().__init__(template, allow_invalid_input) self.name: 
p.ValidatableParameter[str] = p.ValidatableParameter( self.value.name, allow_invalid_input, - validate=lambda v: True if v in self.options.keys() else f'Could not find a template named {v}', + validate=lambda v: True if v in self.options.keys() else f"Could not find a template named {v}", ) if not self.value.version and self.value.name in self.options: - self.value.version = Spec('>0').select([Version(v) for v in self.options[self.value.name].keys()]) + self.value.version = Spec(">0").select([Version(v) for v in self.options[self.value.name].keys()]) self.version = None self._update_versions() @@ -103,23 +103,23 @@ def __init__(self, template: Optional[BaseTemplate], options: List[BaseTemplate] @self.name.on_any_changed def name_any_changed(v: p.ValidatableParameter): self._update_versions() - self.trigger('changed', self) + self.trigger("changed", self) @self.version.on_any_changed def version_any_changed(v: p.ValidatableParameter): if v.value in self.options[self.name.value].keys(): self.value = self.options[self.name.value][v.value] - self.trigger('changed_validated', self) + self.trigger("changed_validated", self) else: self.value.version = v.value - self.trigger('changed', self) + self.trigger("changed", self) # self.name.on_changed(lambda v: self.trigger('changed_validated', self)) # self.version.on_changed(lambda v: self.trigger('changed_validated', self)) self.removed = False - @self.on('removed') + @self.on("removed") def removed_changed(): self.removed = not self.removed diff --git a/pros/conductor/project/ProjectReport.py b/pros/conductor/project/ProjectReport.py index 6af81707..683964d9 100644 --- a/pros/conductor/project/ProjectReport.py +++ b/pros/conductor/project/ProjectReport.py @@ -2,7 +2,7 @@ class ProjectReport(object): - def __init__(self, project: 'Project'): + def __init__(self, project: "Project"): self.project = { "target": project.target, "location": os.path.abspath(project.location), @@ -18,9 +18,9 @@ def __str__(self): s = ( f'PROS Project for {self.project["target"]} at: {self.project["location"]}' f' ({self.project["name"]})' if self.project["name"] - else '' + else "" ) - s += '\n' + s += "\n" rows = [t.values() for t in self.project["templates"]] headers = [h.capitalize() for h in self.project["templates"][0].keys()] s += tabulate.tabulate(rows, headers=headers) diff --git a/pros/conductor/project/ProjectTransaction.py b/pros/conductor/project/ProjectTransaction.py index 8ea963b7..973cedd5 100644 --- a/pros/conductor/project/ProjectTransaction.py +++ b/pros/conductor/project/ProjectTransaction.py @@ -41,31 +41,31 @@ def execute(self, conductor: c.Conductor, project: c.Project): def describe(self, conductor: c.Conductor, project: c.Project): action = project.get_template_actions(conductor.resolve_template(self.template)) if action == TemplateAction.NotApplicable: - return f'{self.template.identifier} cannot be applied to project!' + return f"{self.template.identifier} cannot be applied to project!" if action == TemplateAction.Installable: - return f'{self.template.identifier} will installed to project.' + return f"{self.template.identifier} will be installed to project." if action == TemplateAction.Downgradable: return ( - f'Project will be downgraded to {self.template.identifier} from' - f' {project.templates[self.template.name].version}.' + f"Project will be downgraded to {self.template.identifier} from" + f" {project.templates[self.template.name].version}."
) if action == TemplateAction.Upgradable: return ( - f'Project will be upgraded to {self.template.identifier} from' - f' {project.templates[self.template.name].version}.' + f"Project will be upgraded to {self.template.identifier} from" + f" {project.templates[self.template.name].version}." ) if action == TemplateAction.AlreadyInstalled: - if self.apply_kwargs.get('force_apply'): - return f'{self.template.identifier} will be re-applied.' + if self.apply_kwargs.get("force_apply"): + return f"{self.template.identifier} will be re-applied." elif self.suppress_already_installed: - return f'{self.template.identifier} will not be re-applied.' + return f"{self.template.identifier} will not be re-applied." else: - return f'{self.template.identifier} cannot be applied to project because it is already installed.' + return f"{self.template.identifier} cannot be applied to project because it is already installed." def can_execute(self, conductor: c.Conductor, project: c.Project) -> bool: action = project.get_template_actions(conductor.resolve_template(self.template)) if action == TemplateAction.AlreadyInstalled: - return self.apply_kwargs.get('force_apply') or self.suppress_already_installed + return self.apply_kwargs.get("force_apply") or self.suppress_already_installed return action in [TemplateAction.Installable, TemplateAction.Downgradable, TemplateAction.Upgradable] @@ -87,7 +87,7 @@ def execute(self, conductor: c.Conductor, project: c.Project): ui.logger(__name__).warning(str(e)) def describe(self, conductor: c.Conductor, project: c.Project) -> str: - return f'{self.template.identifier} will be removed' + return f"{self.template.identifier} will be removed" def can_execute(self, conductor: c.Conductor, project: c.Project): return True @@ -119,15 +119,15 @@ def add_action(self, action: Action) -> None: def execute(self): if len(self.actions) == 0: - ui.logger(__name__).warning('No actions necessary.') + ui.logger(__name__).warning("No actions necessary.") return location = self.project.location - tfd, tfn = tempfile.mkstemp(prefix='pros-project-', suffix=f'-{self.project.name}.zip', text='w+b') - with os.fdopen(tfd, 'w+b') as tf: - with zipfile.ZipFile(tf, mode='w') as zf: - files, length = it.tee(location.glob('**/*'), 2) + tfd, tfn = tempfile.mkstemp(prefix="pros-project-", suffix=f"-{self.project.name}.zip", text="w+b") + with os.fdopen(tfd, "w+b") as tf: + with zipfile.ZipFile(tf, mode="w") as zf: + files, length = it.tee(location.glob("**/*"), 2) length = len(list(length)) - with ui.progressbar(files, length=length, label=f'Backing up {self.project.name} to {tfn}') as pb: + with ui.progressbar(files, length=length, label=f"Backing up {self.project.name} to {tfn}") as pb: for file in pb: zf.write(file, arcname=file.relative_to(location)) @@ -136,21 +136,21 @@ def execute(self): for action in self.actions: ui.logger(__name__).debug(action.describe(self.conductor, self.project)) rv = action.execute(self.conductor, self.project) - ui.logger(__name__).debug(f'{action} returned {rv}') + ui.logger(__name__).debug(f"{action} returned {rv}") if rv is not None and not rv: - raise ValueError('Action did not complete successfully') - ui.echo('All actions performed successfully') + raise ValueError("Action did not complete successfully") + ui.echo("All actions performed successfully") except Exception as e: - ui.logger(__name__).warning(f'Failed to perform transaction, restoring project to previous state') + ui.logger(__name__).warning(f"Failed to perform transaction, restoring project to previous state") 
with zipfile.ZipFile(tfn) as zf: - with ui.progressbar(zf.namelist(), label=f'Restoring {self.project.name} from {tfn}') as pb: + with ui.progressbar(zf.namelist(), label=f"Restoring {self.project.name} from {tfn}") as pb: for file in pb: zf.extract(file, path=location) ui.logger(__name__).exception(e) finally: - ui.echo(f'Removing {tfn}') + ui.echo(f"Removing {tfn}") os.remove(tfn) def apply_template(self, template: c.BaseTemplate, suppress_already_installed: bool = False, **kwargs): @@ -168,9 +168,9 @@ def change_name(self, new_name: str): def describe(self) -> str: if len(self.actions) > 0: - return '\n'.join(f'- {a.describe(self.conductor, self.project)}' for a in self.actions) + return "\n".join(f"- {a.describe(self.conductor, self.project)}" for a in self.actions) else: - return 'No actions necessary.' + return "No actions necessary." def can_execute(self) -> bool: return all(a.can_execute(self.conductor, self.project) for a in self.actions) diff --git a/pros/conductor/project/__init__.py b/pros/conductor/project/__init__.py index cb0509f7..4d262587 100644 --- a/pros/conductor/project/__init__.py +++ b/pros/conductor/project/__init__.py @@ -17,7 +17,7 @@ class Project(Config): - def __init__(self, path: str = '.', create: bool = False, raise_on_error: bool = True, defaults: dict = None): + def __init__(self, path: str = ".", create: bool = False, raise_on_error: bool = True, defaults: dict = None): """ Instantiates a PROS project configuration :param path: A path to the project, may be the actual project.pros file, any child directory of the project, @@ -27,27 +27,27 @@ def __init__(self, path: str = '.', create: bool = False, raise_on_error: bool = :param raise_on_error: :param defaults: """ - file = Project.find_project(path or '.') + file = Project.find_project(path or ".") if file is None and create: - file = os.path.join(path, 'project.pros') if not os.path.basename(path) == 'project.pros' else path + file = os.path.join(path, "project.pros") if not os.path.basename(path) == "project.pros" else path elif file is None and raise_on_error: - raise ConfigNotFoundException('A project config was not found for {}'.format(path)) + raise ConfigNotFoundException("A project config was not found for {}".format(path)) if defaults is None: defaults = {} - self.target: str = defaults.get('target', 'cortex').lower() # VEX Hardware target (V5/Cortex) - self.templates: Dict[str, Template] = defaults.get('templates', {}) - self.upload_options: Dict = defaults.get('upload_options', {}) - self.project_name: str = defaults.get('project_name', None) + self.target: str = defaults.get("target", "cortex").lower() # VEX Hardware target (V5/Cortex) + self.templates: Dict[str, Template] = defaults.get("templates", {}) + self.upload_options: Dict = defaults.get("upload_options", {}) + self.project_name: str = defaults.get("project_name", None) super(Project, self).__init__(file, error_on_decode=raise_on_error) - if 'kernel' in self.__dict__: + if "kernel" in self.__dict__: # Add backwards compatibility with PROS CLI 2 projects by adding kernel as a pseudo-template - self.templates['kernel'] = Template( + self.templates["kernel"] = Template( user_files=self.all_files, - name='kernel', - version=self.__dict__['kernel'], + name="kernel", + version=self.__dict__["kernel"], target=self.target, - output='bin/output.bin', + output="bin/output.bin", ) @property @@ -63,13 +63,13 @@ def name(self): return ( self.project_name or os.path.basename(self.location) - or 
os.path.basename(self.templates['kernel'].metadata['output']) - or 'pros' + or os.path.basename(self.templates["kernel"].metadata["output"]) + or "pros" ) @property def all_files(self) -> Set[str]: - return {os.path.relpath(p, self.location) for p in glob.glob(f'{self.location}/**/*', recursive=True)} + return {os.path.relpath(p, self.location) for p in glob.glob(f"{self.location}/**/*", recursive=True)} def get_template_actions(self, template: BaseTemplate) -> TemplateAction: ui.logger(__name__).debug(template) @@ -77,7 +77,7 @@ def get_template_actions(self, template: BaseTemplate) -> TemplateAction: return TemplateAction.NotApplicable from semantic_version import Spec, Version - if template.name != 'kernel' and Version(self.kernel) not in Spec(template.supported_kernels or '>0'): + if template.name != "kernel" and Version(self.kernel) not in Spec(template.supported_kernels or ">0"): if template.name in self.templates.keys(): return TemplateAction.AlreadyInstalled return TemplateAction.NotApplicable @@ -128,7 +128,7 @@ def apply_template( installed_user_files = set() for lib_name, lib in self.templates.items(): if lib_name == template.name or lib.name == template.name: - logger(__name__).debug(f'{lib} is already installed') + logger(__name__).debug(f"{lib} is already installed") logger(__name__).debug(lib.system_files) logger(__name__).debug(lib.user_files) transaction.extend_rm(lib.system_files) @@ -140,14 +140,14 @@ def apply_template( deprecated_user_files = installed_user_files.intersection(self.all_files) - set(template.user_files) if any(deprecated_user_files): if force_user or confirm( - f'The following user files have been deprecated: {deprecated_user_files}. ' - f'Do you want to update them?' + f"The following user files have been deprecated: {deprecated_user_files}. " + f"Do you want to update them?" ): transaction.extend_rm(deprecated_user_files) else: logger(__name__).warning( - f'Deprecated user files may cause weird quirks. See migration guidelines from ' - f'{template.identifier}\'s release notes.' + f"Deprecated user files may cause weird quirks. See migration guidelines from " + f"{template.identifier}'s release notes." ) # Carry forward deprecated user files into the template about to be applied so that user gets warned in # future. @@ -177,22 +177,22 @@ def new_user_filter(new_file: str) -> bool: if any([file in transaction.effective_state for file in template.system_files]) and not force_system: confirm( - f'Some required files for {template.identifier} already exist in the project. ' - f'Overwrite the existing files?', + f"Some required files for {template.identifier} already exist in the project. " + f"Overwrite the existing files?", abort=True, ) transaction.extend_add(template.system_files, template.location) logger(__name__).debug(transaction) - transaction.commit(label=f'Applying {template.identifier}', remove_empty_directories=remove_empty_directories) + transaction.commit(label=f"Applying {template.identifier}", remove_empty_directories=remove_empty_directories) self.templates[template.name] = template self.save() def remove_template(self, template: Template, remove_user: bool = False, remove_empty_directories: bool = True): if not self.template_is_installed(template): - raise ValueError(f'{template.identifier} is not installed on this project.') - if template.name == 'kernel': - raise ValueError(f'Cannot remove the kernel template. 
Maybe create a new project?') + raise ValueError(f"{template.identifier} is not installed on this project.") + if template.name == "kernel": + raise ValueError(f"Cannot remove the kernel template. Maybe create a new project?") real_template = LocalTemplate(orig=template, location=self.location) transaction = Transaction(self.location, set(self.all_files)) @@ -201,7 +201,7 @@ def remove_template(self, template: Template, remove_user: bool = False, remove_ transaction.extend_rm(real_template.real_user_files) logger(__name__).debug(transaction) transaction.commit( - label=f'Removing {template.identifier}...', remove_empty_directories=remove_empty_directories + label=f"Removing {template.identifier}...", remove_empty_directories=remove_empty_directories ) del self.templates[real_template.name] self.save() @@ -223,39 +223,39 @@ def resolve_template(self, query: Union[str, BaseTemplate]) -> List[Template]: def __str__(self): return ( - f'Project: {self.location} ({self.name}) for {self.target} with ' + f"Project: {self.location} ({self.name}) for {self.target} with " f'{", ".join([str(t) for t in self.templates.values()])}' ) @property def kernel(self): - if 'kernel' in self.templates: - return self.templates['kernel'].version - elif hasattr(self.__dict__, 'kernel'): - return self.__dict__['kernel'] - return '' + if "kernel" in self.templates: + return self.templates["kernel"].version + elif hasattr(self.__dict__, "kernel"): + return self.__dict__["kernel"] + return "" @property def output(self): - if 'kernel' in self.templates: - return self.templates['kernel'].metadata['output'] - elif hasattr(self.__dict__, 'output'): - return self.__dict__['output'] - return 'bin/output.bin' + if "kernel" in self.templates: + return self.templates["kernel"].metadata["output"] + elif hasattr(self.__dict__, "output"): + return self.__dict__["output"] + return "bin/output.bin" def make(self, build_args: List[str]): import subprocess env = os.environ.copy() # Add PROS toolchain to the beginning of PATH to ensure PROS binaries are preferred - if os.environ.get('PROS_TOOLCHAIN'): - env['PATH'] = os.path.join(os.environ.get('PROS_TOOLCHAIN'), 'bin') + os.pathsep + env['PATH'] + if os.environ.get("PROS_TOOLCHAIN"): + env["PATH"] = os.path.join(os.environ.get("PROS_TOOLCHAIN"), "bin") + os.pathsep + env["PATH"] # call make.exe if on Windows - if os.name == 'nt' and os.environ.get('PROS_TOOLCHAIN'): - make_cmd = os.path.join(os.environ.get('PROS_TOOLCHAIN'), 'bin', 'make.exe') + if os.name == "nt" and os.environ.get("PROS_TOOLCHAIN"): + make_cmd = os.path.join(os.environ.get("PROS_TOOLCHAIN"), "bin", "make.exe") else: - make_cmd = 'make' + make_cmd = "make" stdout_pipe = EchoPipe() stderr_pipe = EchoPipe(err=True) process = None @@ -269,12 +269,12 @@ def make(self, build_args: List[str]): stderr=stderr_pipe, ) except Exception as e: - if not os.environ.get('PROS_TOOLCHAIN'): + if not os.environ.get("PROS_TOOLCHAIN"): ui.logger(__name__).warn( "PROS toolchain not found! 
Please ensure the toolchain is installed correctly and your environment variables are set properly.\n" ) ui.logger(__name__).error( - f"ERROR WHILE CALLING '{make_cmd}' WITH EXCEPTION: {str(e)}\n", extra={'sentry': False} + f"ERROR WHILE CALLING '{make_cmd}' WITH EXCEPTION: {str(e)}\n", extra={"sentry": False} ) stdout_pipe.close() stderr_pipe.close() @@ -303,7 +303,7 @@ def make_scan_build( td = tempfile.TemporaryDirectory() td_path = td.name.replace("\\", "/") - build_args = [*build_args, f'BINDIR={td_path}'] + build_args = [*build_args, f"BINDIR={td_path}"] def libscanbuild_capture(args: argparse.Namespace) -> Tuple[int, Iterable[Compilation]]: """ @@ -321,17 +321,17 @@ def libscanbuild_capture(args: argparse.Namespace) -> Tuple[int, Iterable[Compil setup_environment, ) - with temporary_directory(prefix='intercept-') as tmp_dir: + with temporary_directory(prefix="intercept-") as tmp_dir: # run the build command environment = setup_environment(args, tmp_dir) - if os.environ.get('PROS_TOOLCHAIN'): - environment['PATH'] = ( - os.path.join(os.environ.get('PROS_TOOLCHAIN'), 'bin') + os.pathsep + environment['PATH'] + if os.environ.get("PROS_TOOLCHAIN"): + environment["PATH"] = ( + os.path.join(os.environ.get("PROS_TOOLCHAIN"), "bin") + os.pathsep + environment["PATH"] ) - if sys.platform == 'darwin': - environment['PATH'] = ( - os.path.dirname(os.path.abspath(sys.executable)) + os.pathsep + environment['PATH'] + if sys.platform == "darwin": + environment["PATH"] = ( + os.path.dirname(os.path.abspath(sys.executable)) + os.pathsep + environment["PATH"] ) if not suppress_output: @@ -343,12 +343,12 @@ def libscanbuild_capture(args: argparse.Namespace) -> Tuple[int, Iterable[Compil try: exit_code = run_build(args.build, env=environment, stdout=pipe, stderr=pipe, cwd=self.directory) except Exception as e: - if not os.environ.get('PROS_TOOLCHAIN'): + if not os.environ.get("PROS_TOOLCHAIN"): ui.logger(__name__).warn( "PROS toolchain not found! 
Please ensure the toolchain is installed correctly and your environment variables are set properly.\n" ) ui.logger(__name__).error( - f"ERROR WHILE CALLING '{make_cmd}' WITH EXCEPTION: {str(e)}\n", extra={'sentry': False} + f"ERROR WHILE CALLING '{make_cmd}' WITH EXCEPTION: {str(e)}\n", extra={"sentry": False} ) if not suppress_output: pipe.close() @@ -362,21 +362,21 @@ def libscanbuild_capture(args: argparse.Namespace) -> Tuple[int, Iterable[Compil return exit_code, iter(set(current)) # call make.exe if on Windows - if os.name == 'nt' and os.environ.get('PROS_TOOLCHAIN'): - make_cmd = os.path.join(os.environ.get('PROS_TOOLCHAIN'), 'bin', 'make.exe') + if os.name == "nt" and os.environ.get("PROS_TOOLCHAIN"): + make_cmd = os.path.join(os.environ.get("PROS_TOOLCHAIN"), "bin", "make.exe") else: - make_cmd = 'make' + make_cmd = "make" args = create_intercept_parser().parse_args( [ - '--override-compiler', - '--use-cc', - 'arm-none-eabi-gcc', - '--use-c++', - 'arm-none-eabi-g++', + "--override-compiler", + "--use-cc", + "arm-none-eabi-gcc", + "--use-c++", + "arm-none-eabi-g++", make_cmd, *build_args, - 'CC=intercept-cc', - 'CXX=intercept-c++', + "CC=intercept-cc", + "CXX=intercept-c++", ] ) exit_code, entries = libscanbuild_capture(args) @@ -388,47 +388,47 @@ def libscanbuild_capture(args: argparse.Namespace) -> Tuple[int, Iterable[Compil if not any(any_entries): return exit_code if not suppress_output: - ui.echo('Capturing metadata for PROS Editor...') + ui.echo("Capturing metadata for PROS Editor...") env = os.environ.copy() # Add PROS toolchain to the beginning of PATH to ensure PROS binaries are preferred - if os.environ.get('PROS_TOOLCHAIN'): - env['PATH'] = os.path.join(os.environ.get('PROS_TOOLCHAIN'), 'bin') + os.pathsep + env['PATH'] + if os.environ.get("PROS_TOOLCHAIN"): + env["PATH"] = os.path.join(os.environ.get("PROS_TOOLCHAIN"), "bin") + os.pathsep + env["PATH"] cc_sysroot = subprocess.run( - [make_cmd, 'cc-sysroot'], env=env, stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=self.directory + [make_cmd, "cc-sysroot"], env=env, stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=self.directory ) lines = str(cc_sysroot.stderr.decode()).splitlines() + str(cc_sysroot.stdout.decode()).splitlines() lines = [l.strip() for l in lines] cc_sysroot_includes = [] copy = False for line in lines: - if line == '#include <...> search starts here:': + if line == "#include <...> search starts here:": copy = True continue - if line == 'End of search list.': + if line == "End of search list.": copy = False continue if copy: - cc_sysroot_includes.append(f'-isystem{line}') + cc_sysroot_includes.append(f"-isystem{line}") cxx_sysroot = subprocess.run( - [make_cmd, 'cxx-sysroot'], env=env, stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=self.directory + [make_cmd, "cxx-sysroot"], env=env, stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=self.directory ) lines = str(cxx_sysroot.stderr.decode()).splitlines() + str(cxx_sysroot.stdout.decode()).splitlines() lines = [l.strip() for l in lines] cxx_sysroot_includes = [] copy = False for line in lines: - if line == '#include <...> search starts here:': + if line == "#include <...> search starts here:": copy = True continue - if line == 'End of search list.': + if line == "End of search list.": copy = False continue if copy: - cxx_sysroot_includes.append(f'-isystem{line}') + cxx_sysroot_includes.append(f"-isystem{line}") new_entries, entries = itertools.tee(entries, 2) new_sources = set([e.source for e in entries]) if not cdb_file: - cdb_file = 
os.path.join(self.directory, 'compile_commands.json') + cdb_file = os.path.join(self.directory, "compile_commands.json") if isinstance(cdb_file, str) and os.path.isfile(cdb_file): old_entries = itertools.filterfalse( lambda entry: entry.source in new_sources, CompilationDatabase.load(cdb_file) @@ -436,19 +436,19 @@ def libscanbuild_capture(args: argparse.Namespace) -> Tuple[int, Iterable[Compil else: old_entries = [] - extra_flags = ['-target', 'armv7ar-none-none-eabi'] - logger(__name__).debug('cc_sysroot_includes') + extra_flags = ["-target", "armv7ar-none-none-eabi"] + logger(__name__).debug("cc_sysroot_includes") logger(__name__).debug(cc_sysroot_includes) - logger(__name__).debug('cxx_sysroot_includes') + logger(__name__).debug("cxx_sysroot_includes") logger(__name__).debug(cxx_sysroot_includes) - if sys.platform == 'win32': + if sys.platform == "win32": extra_flags.extend(["-fno-ms-extensions", "-fno-ms-compatibility", "-fno-delayed-template-parsing"]) def new_entry_map(entry): - if entry.compiler == 'c': + if entry.compiler == "c": entry.flags = extra_flags + cc_sysroot_includes + entry.flags - elif entry.compiler == 'c++': + elif entry.compiler == "c++": entry.flags = extra_flags + cxx_sysroot_includes + entry.flags return entry @@ -456,13 +456,13 @@ def new_entry_map(entry): def entry_map(entry: Compilation): json_entry = entry.as_db_entry() - json_entry['arguments'][0] = 'clang' if entry.compiler == 'c' else 'clang++' + json_entry["arguments"][0] = "clang" if entry.compiler == "c" else "clang++" return json_entry entries = itertools.chain(old_entries, new_entries) json_entries = list(map(entry_map, entries)) if isinstance(cdb_file, str): - cdb_file = open(cdb_file, 'w') + cdb_file = open(cdb_file, "w") import json json.dump(json_entries, cdb_file, sort_keys=True, indent=4) @@ -478,7 +478,7 @@ def compile(self, build_args: List[str], scan_build: Optional[bool] = None): @staticmethod def find_project(path: str, recurse_times: int = 10): - path = os.path.abspath(path or '.') + path = os.path.abspath(path or ".") if os.path.isfile(path): path = os.path.dirname(path) if os.path.isdir(path): @@ -487,10 +487,10 @@ def find_project(path: str, recurse_times: int = 10): files = [ f for f in os.listdir(path) - if os.path.isfile(os.path.join(path, f)) and f.lower() == 'project.pros' + if os.path.isfile(os.path.join(path, f)) and f.lower() == "project.pros" ] if len(files) == 1: # found a project.pros file! 
- logger(__name__).info(f'Found Project Path: {os.path.join(path, files[0])}') + logger(__name__).info(f"Found Project Path: {os.path.join(path, files[0])}") return os.path.join(path, files[0]) path = os.path.dirname(path) else: @@ -498,4 +498,4 @@ def find_project(path: str, recurse_times: int = 10): return None -__all__ = ['Project', 'ProjectReport'] +__all__ = ["Project", "ProjectReport"] diff --git a/pros/conductor/templates/base_template.py b/pros/conductor/templates/base_template.py index 95a19064..2eb7d6ad 100644 --- a/pros/conductor/templates/base_template.py +++ b/pros/conductor/templates/base_template.py @@ -12,20 +12,20 @@ def __init__(self, **kwargs): self.supported_kernels: str = None self.target: str = None self.metadata: Dict[str, Any] = {} - if 'orig' in kwargs: - self.__dict__.update({k: v for k, v in kwargs.pop('orig').__dict__.items() if k in self.__dict__}) + if "orig" in kwargs: + self.__dict__.update({k: v for k, v in kwargs.pop("orig").__dict__.items() if k in self.__dict__}) self.__dict__.update({k: v for k, v in kwargs.items() if k in self.__dict__}) self.metadata.update({k: v for k, v in kwargs.items() if k not in self.__dict__}) - if 'depot' in self.metadata and 'origin' not in self.metadata: - self.metadata['origin'] = self.metadata.pop('depot') - if 'd' in self.metadata and 'depot' not in self.metadata: - self.metadata['depot'] = self.metadata.pop('d') - if 'l' in self.metadata and 'location' not in self.metadata: - self.metadata['location'] = self.metadata.pop('l') - if self.name == 'pros': - self.name = 'kernel' + if "depot" in self.metadata and "origin" not in self.metadata: + self.metadata["origin"] = self.metadata.pop("depot") + if "d" in self.metadata and "depot" not in self.metadata: + self.metadata["depot"] = self.metadata.pop("d") + if "l" in self.metadata and "location" not in self.metadata: + self.metadata["location"] = self.metadata.pop("l") + if self.name == "pros": + self.name = "kernel" - def satisfies(self, query: 'BaseTemplate', kernel_version: Union[str, Version] = None) -> bool: + def satisfies(self, query: "BaseTemplate", kernel_version: Union[str, Version] = None) -> bool: if query.name and self.name != query.name: return False if query.target and self.target != query.target: @@ -47,7 +47,7 @@ def satisfies(self, query: 'BaseTemplate', kernel_version: Union[str, Version] = def __str__(self): fields = [self.metadata.get("origin", None), self.target, self.__class__.__name__] additional = ", ".join(map(str, filter(bool, fields))) - return f'{self.identifier} ({additional})' + return f"{self.identifier} ({additional})" def __gt__(self, other): if isinstance(other, BaseTemplate): @@ -65,30 +65,30 @@ def __eq__(self, other): def __hash__(self): return self.identifier.__hash__() - def as_query(self, version='>0', metadata=False, **kwargs): + def as_query(self, version=">0", metadata=False, **kwargs): if isinstance(metadata, bool) and not metadata: metadata = dict() return BaseTemplate(orig=self, version=version, metadata=metadata, **kwargs) @property def identifier(self): - return f'{self.name}@{self.version}' + return f"{self.name}@{self.version}" @property def origin(self): - return self.metadata.get('origin', 'Unknown') + return self.metadata.get("origin", "Unknown") @classmethod - def create_query(cls, name: str = None, **kwargs) -> 'BaseTemplate': + def create_query(cls, name: str = None, **kwargs) -> "BaseTemplate": if not isinstance(name, str): return cls(**kwargs) - if name.count('@') > 1: - raise ValueError(f'Malformed identifier: 
{name}') - if '@' in name: - name, kwargs['version'] = name.split('@') - if kwargs.get('version', 'latest') == 'latest': - kwargs['version'] = '>=0' - if name == 'kernal': + if name.count("@") > 1: + raise ValueError(f"Malformed identifier: {name}") + if "@" in name: + name, kwargs["version"] = name.split("@") + if kwargs.get("version", "latest") == "latest": + kwargs["version"] = ">=0" + if name == "kernal": ui.echo("Assuming 'kernal' is the British spelling of kernel.") - name = 'kernel' + name = "kernel" return cls(name=name, **kwargs) diff --git a/pros/conductor/templates/external_template.py b/pros/conductor/templates/external_template.py index ce08662e..870ef28d 100644 --- a/pros/conductor/templates/external_template.py +++ b/pros/conductor/templates/external_template.py @@ -10,18 +10,18 @@ class ExternalTemplate(Config, Template): def __init__(self, file: str, **kwargs): if os.path.isdir(file): - file = os.path.join(file, 'template.pros') + file = os.path.join(file, "template.pros") elif zipfile.is_zipfile(file): self.tf = tempfile.NamedTemporaryFile(delete=False) with zipfile.ZipFile(file) as zf: - with zf.open('template.pros') as zt: + with zf.open("template.pros") as zt: self.tf.write(zt.read()) self.tf.seek(0, 0) file = self.tf.name - error_on_decode = kwargs.pop('error_on_decode', False) + error_on_decode = kwargs.pop("error_on_decode", False) Template.__init__(self, **kwargs) Config.__init__(self, file, error_on_decode=error_on_decode) def __del__(self): - if hasattr(self, 'tr'): + if hasattr(self, "tr"): del self.tf diff --git a/pros/conductor/templates/local_template.py b/pros/conductor/templates/local_template.py index 53d66e73..d3be5b34 100644 --- a/pros/conductor/templates/local_template.py +++ b/pros/conductor/templates/local_template.py @@ -4,7 +4,7 @@ def _fix_path(*paths: str) -> str: - return os.path.normpath(os.path.join(*paths).replace('\\', '/')) + return os.path.normpath(os.path.join(*paths).replace("\\", "/")) class LocalTemplate(Template): diff --git a/pros/conductor/transaction.py b/pros/conductor/transaction.py index 0fcb05d7..4b7ba650 100644 --- a/pros/conductor/transaction.py +++ b/pros/conductor/transaction.py @@ -19,7 +19,7 @@ def extend_add(self, paths: Iterable[str], src: str): self.add(path, src) def add(self, path: str, src: str): - path = os.path.normpath(path.replace('\\', '/')) + path = os.path.normpath(path.replace("\\", "/")) self._add_files.add(path) self.effective_state.add(path) self._add_srcs[path] = src @@ -31,7 +31,7 @@ def extend_rm(self, paths: Iterable[str]): self.rm(path) def rm(self, path: str): - path = os.path.normpath(path.replace('\\', '/')) + path = os.path.normpath(path.replace("\\", "/")) self._rm_files.add(path) if path in self.effective_state: self.effective_state.remove(path) @@ -39,18 +39,18 @@ def rm(self, path: str): self._add_files.remove(path) self._add_srcs.pop(path) - def commit(self, label: str = 'Committing transaction', remove_empty_directories: bool = True): + def commit(self, label: str = "Committing transaction", remove_empty_directories: bool = True): with ui.progressbar(length=len(self._rm_files) + len(self._add_files), label=label) as pb: - for file in sorted(self._rm_files, key=lambda p: p.count('/') + p.count('\\'), reverse=True): + for file in sorted(self._rm_files, key=lambda p: p.count("/") + p.count("\\"), reverse=True): file_path = os.path.join(self.location, file) if os.path.isfile(file_path): - logger(__name__).info(f'Removing {file}') + logger(__name__).info(f"Removing {file}") 
os.remove(os.path.join(self.location, file)) else: - logger(__name__).info(f'Not removing nonexistent {file}') + logger(__name__).info(f"Not removing nonexistent {file}") pardir = os.path.abspath(os.path.join(file_path, os.pardir)) while remove_empty_directories and len(os.listdir(pardir)) == 0: - logger(__name__).info(f'Removing {os.path.relpath(pardir, self.location)}') + logger(__name__).info(f"Removing {os.path.relpath(pardir, self.location)}") os.rmdir(pardir) pardir = os.path.abspath(os.path.join(pardir, os.pardir)) if pardir == self.location: @@ -63,13 +63,13 @@ def commit(self, label: str = 'Committing transaction', remove_empty_directories destination = os.path.join(self.location, file) if os.path.isfile(source): if not os.path.isdir(os.path.dirname(destination)): - logger(__name__).debug(f'Creating directories: f{destination}') + logger(__name__).debug(f"Creating directories: f{destination}") os.makedirs(os.path.dirname(destination), exist_ok=True) - logger(__name__).info(f'Adding {file}') + logger(__name__).info(f"Adding {file}") shutil.copy(os.path.join(self._add_srcs[file], file), os.path.join(self.location, file)) else: logger(__name__).info(f"Not copying {file} because {source} doesn't exist.") pb.update(1) def __str__(self): - return f'Transaction Object: ADD: {self._add_files}\tRM: {self._rm_files}\tLocation: {self.location}' + return f"Transaction Object: ADD: {self._add_files}\tRM: {self._rm_files}\tLocation: {self.location}" diff --git a/pros/config/cli_config.py b/pros/config/cli_config.py index 1600b146..45f18a33 100644 --- a/pros/config/cli_config.py +++ b/pros/config/cli_config.py @@ -17,7 +17,7 @@ class CliConfig(Config): def __init__(self, file=None): if not file: - file = os.path.join(click.get_app_dir('PROS'), 'cli.pros') + file = os.path.join(click.get_app_dir("PROS"), "cli.pros") self.update_frequency: timedelta = timedelta(hours=1) self.override_use_build_compile_commands: Optional[bool] = None self.offer_sentry: Optional[bool] = None @@ -31,19 +31,19 @@ def needs_online_fetch(self, last_fetch: datetime) -> bool: def use_build_compile_commands(self): if self.override_use_build_compile_commands is not None: return self.override_use_build_compile_commands - paths = [os.path.join('~', '.pros-atom'), os.path.join('~', '.pros-editor')] + paths = [os.path.join("~", ".pros-atom"), os.path.join("~", ".pros-editor")] return any([os.path.exists(os.path.expanduser(p)) for p in paths]) - def get_upgrade_manifest(self, force: bool = False) -> Optional['UpgradeManifestV1']: + def get_upgrade_manifest(self, force: bool = False) -> Optional["UpgradeManifestV1"]: from pros.upgrade.manifests.upgrade_manifest_v1 import UpgradeManifestV1 # noqa: F811 if not force and not self.needs_online_fetch(self.cached_upgrade[0]): return self.cached_upgrade[1] - pros.common.logger(__name__).info('Fetching upgrade manifest...') + pros.common.logger(__name__).info("Fetching upgrade manifest...") import jsonpickle import requests - r = requests.get('https://purduesigbots.github.io/pros-mainline/cli-updates.json') + r = requests.get("https://purduesigbots.github.io/pros-mainline/cli-updates.json") pros.common.logger(__name__).debug(r) if r.status_code == 200: try: @@ -55,7 +55,7 @@ def get_upgrade_manifest(self, force: bool = False) -> Optional['UpgradeManifest self.save() return self.cached_upgrade[1] else: - pros.common.logger(__name__).warning(f'Failed to fetch CLI updates because status code: {r.status_code}') + pros.common.logger(__name__).warning(f"Failed to fetch CLI updates because 
status code: {r.status_code}") pros.common.logger(__name__).debug(r) return None @@ -66,6 +66,6 @@ def cli_config() -> CliConfig: return CliConfig() ctx.ensure_object(dict) assert isinstance(ctx.obj, dict) - if not hasattr(ctx.obj, 'cli_config') or not isinstance(ctx.obj['cli_config'], CliConfig): - ctx.obj['cli_config'] = CliConfig() - return ctx.obj['cli_config'] + if not hasattr(ctx.obj, "cli_config") or not isinstance(ctx.obj["cli_config"], CliConfig): + ctx.obj["cli_config"] = CliConfig() + return ctx.obj["cli_config"] diff --git a/pros/config/config.py b/pros/config/config.py index 59cda986..b0bb111e 100644 --- a/pros/config/config.py +++ b/pros/config/config.py @@ -17,40 +17,40 @@ class Config(object): """ def __init__(self, file, error_on_decode=False): - logger(__name__).debug('Opening {} ({})'.format(file, self.__class__.__name__)) + logger(__name__).debug("Opening {} ({})".format(file, self.__class__.__name__)) self.save_file = file # __ignored property has any fields which shouldn't be included the pickled config file - self.__ignored = self.__dict__.get('_Config__ignored', []) - self.__ignored.append('save_file') - self.__ignored.append('_Config__ignored') + self.__ignored = self.__dict__.get("_Config__ignored", []) + self.__ignored.append("save_file") + self.__ignored.append("_Config__ignored") if file: # If the file already exists, update this new config with the values in the file if os.path.isfile(file): - with open(file, 'r') as f: + with open(file, "r") as f: try: result = jsonpickle.decode(f.read()) if isinstance(result, dict): - if 'py/state' in result: - class_name = '{}.{}'.format(self.__class__.__module__, self.__class__.__qualname__) - logger(__name__).debug('Coercing {} to {}'.format(result['py/object'], class_name)) - old_object = result['py/object'] + if "py/state" in result: + class_name = "{}.{}".format(self.__class__.__module__, self.__class__.__qualname__) + logger(__name__).debug("Coercing {} to {}".format(result["py/object"], class_name)) + old_object = result["py/object"] try: - result['py/object'] = class_name + result["py/object"] = class_name result = jsonpickle.unpickler.Unpickler().restore(result) except (json.decoder.JSONDecodeError, AttributeError) as e: logger(__name__).debug(e) logger(__name__).warning( - f'Couldn\'t coerce {file} ({old_object}) to ' - f'{class_name}. Using rudimentary coercion' + f"Couldn't coerce {file} ({old_object}) to " + f"{class_name}. 
Using rudimentary coercion" ) - self.__dict__.update(result['py/state']) + self.__dict__.update(result["py/state"]) else: self.__dict__.update(result) elif isinstance(result, object): self.__dict__.update(result.__dict__) except (json.decoder.JSONDecodeError, AttributeError, UnicodeDecodeError) as e: if error_on_decode: - logger(__name__).error(f'Error parsing {file}') + logger(__name__).error(f"Error parsing {file}") logger(__name__).exception(e) raise e else: @@ -58,7 +58,7 @@ def __init__(self, file, error_on_decode=False): pass # obvious elif os.path.isdir(file): - raise ValueError('{} must be a file, not a directory'.format(file)) + raise ValueError("{} must be a file, not a directory".format(file)) # The file didn't exist when we created, so we'll save the default values else: try: @@ -68,7 +68,7 @@ def __init__(self, file, error_on_decode=False): logger(__name__).exception(e) raise e else: - logger(__name__).debug('Failed to save {} ({})'.format(file, e)) + logger(__name__).debug("Failed to save {} ({})".format(file, e)) from pros.common.sentry import add_context @@ -76,7 +76,7 @@ def __init__(self, file, error_on_decode=False): def __getstate__(self): state = self.__dict__.copy() - if '_Config__ignored' in self.__dict__: + if "_Config__ignored" in self.__dict__: for key in [k for k in self.__ignored if k in state]: del state[key] return state @@ -85,7 +85,7 @@ def __setstate__(self, state): self.__dict__.update(state) def __str__(self): - jsonpickle.set_encoder_options('json', sort_keys=True) + jsonpickle.set_encoder_options("json", sort_keys=True) return jsonpickle.encode(self) def delete(self): @@ -95,12 +95,12 @@ def delete(self): def save(self, file: str = None) -> None: if file is None: file = self.save_file - jsonpickle.set_encoder_options('json', sort_keys=True, indent=4) + jsonpickle.set_encoder_options("json", sort_keys=True, indent=4) if os.path.dirname(file): os.makedirs(os.path.dirname(file), exist_ok=True) - with open(file, 'w') as f: + with open(file, "w") as f: f.write(jsonpickle.encode(self)) - logger(__name__).debug('Saved {}'.format(file)) + logger(__name__).debug("Saved {}".format(file)) def migrate(self, migration): for old, new in migration.iteritems(): diff --git a/pros/ga/analytics.py b/pros/ga/analytics.py index de7d806d..40202be1 100644 --- a/pros/ga/analytics.py +++ b/pros/ga/analytics.py @@ -7,8 +7,8 @@ import requests from requests_futures.sessions import FuturesSession -url = 'https://www.google-analytics.com/collect' -agent = 'pros-cli' +url = "https://www.google-analytics.com/collect" +agent = "pros-cli" """ PROS ANALYTICS CLASS @@ -27,9 +27,9 @@ def __init__(self): self.cli_config.save() self.sent = False # Variables that the class will use - self.gaID = self.cli_config.ga['ga_id'] - self.useAnalytics = self.cli_config.ga['enabled'] - self.uID = self.cli_config.ga['u_id'] + self.gaID = self.cli_config.ga["ga_id"] + self.useAnalytics = self.cli_config.ga["enabled"] + self.uID = self.cli_config.ga["u_id"] self.pendingRequests = [] def send(self, action): @@ -39,36 +39,36 @@ def send(self, action): try: # Payload to be sent to GA, idk what some of them are but it works payload = { - 'v': 1, - 'tid': self.gaID, - 'aip': 1, - 'z': random.random(), - 'cid': self.uID, - 't': 'event', - 'ec': 'action', - 'ea': action, - 'el': 'CLI', - 'ev': '1', - 'ni': 0, + "v": 1, + "tid": self.gaID, + "aip": 1, + "z": random.random(), + "cid": self.uID, + "t": "event", + "ec": "action", + "ea": action, + "el": "CLI", + "ev": "1", + "ni": 0, } session = FuturesSession() # 
Send payload to GA servers - future = session.post(url=url, data=payload, headers={'User-Agent': agent}, timeout=5.0) + future = session.post(url=url, data=payload, headers={"User-Agent": agent}, timeout=5.0) self.pendingRequests.append(future) except Exception: from pros.cli.common import logger logger(__name__).warning( - "Unable to send analytics. Do you have a stable internet connection?", extra={'sentry': False} + "Unable to send analytics. Do you have a stable internet connection?", extra={"sentry": False} ) def set_use(self, value: bool): # Sets if GA is being used or not self.useAnalytics = value - self.cli_config.ga['enabled'] = self.useAnalytics + self.cli_config.ga["enabled"] = self.useAnalytics self.cli_config.save() def process_requests(self): diff --git a/pros/serial/__init__.py b/pros/serial/__init__.py index 0177d021..36e16fa2 100644 --- a/pros/serial/__init__.py +++ b/pros/serial/__init__.py @@ -4,11 +4,11 @@ def bytes_to_str(arr): if isinstance(arr, str): arr = bytes(arr) - if hasattr(arr, '__iter__'): - return ''.join('{:02X} '.format(x) for x in arr).strip() + if hasattr(arr, "__iter__"): + return "".join("{:02X} ".format(x) for x in arr).strip() else: # actually just a single byte - return '0x{:02X}'.format(arr) + return "0x{:02X}".format(arr) -def decode_bytes_to_str(data: Union[bytes, bytearray], encoding: str = 'utf-8', errors: str = 'strict') -> str: - return data.split(b'\0', 1)[0].decode(encoding=encoding, errors=errors) +def decode_bytes_to_str(data: Union[bytes, bytearray], encoding: str = "utf-8", errors: str = "strict") -> str: + return data.split(b"\0", 1)[0].decode(encoding=encoding, errors=errors) diff --git a/pros/serial/devices/stream_device.py b/pros/serial/devices/stream_device.py index 2649af97..a285619c 100644 --- a/pros/serial/devices/stream_device.py +++ b/pros/serial/devices/stream_device.py @@ -42,7 +42,7 @@ def promiscuous(self, value: bool): pass def read(self) -> Tuple[bytes, bytes]: - return b'', self.port.read_all() + return b"", self.port.read_all() def write(self, data: Union[bytes, str]): self.port.write(data) diff --git a/pros/serial/devices/vex/cortex_device.py b/pros/serial/devices/vex/cortex_device.py index 13a35d96..189b3e86 100644 --- a/pros/serial/devices/vex/cortex_device.py +++ b/pros/serial/devices/vex/cortex_device.py @@ -33,11 +33,11 @@ def __init__(self, data: Tuple[bytes, ...]): def __str__(self): return ( - f' Tether: {str(self.flags)}\n' - f' Cortex: F/W {self.robot_firmware[0]}.{self.robot_firmware[1]} w/ {self.robot_battery:1.2f} V ' - f'(Backup: {self.backup_battery:1.2f} V)\n' - f'Joystick: F/W {self.joystick_firmware[0]}.{self.robot_firmware[1]} w/ ' - f'{self.joystick_battery:1.2f} V' + f" Tether: {str(self.flags)}\n" + f" Cortex: F/W {self.robot_firmware[0]}.{self.robot_firmware[1]} w/ {self.robot_battery:1.2f} V " + f"(Backup: {self.backup_battery:1.2f} V)\n" + f"Joystick: F/W {self.joystick_firmware[0]}.{self.robot_firmware[1]} w/ " + f"{self.joystick_battery:1.2f} V" ) class SystemStatusFlags(IntFlag): @@ -56,26 +56,26 @@ def andeq(a, b): return (a & b) == b if not self.value & self.TETH_BITS: - s = 'Serial w/VEXnet 1.0 Keys' + s = "Serial w/VEXnet 1.0 Keys" elif andeq(self.value, 0x01): - s = 'Serial w/VEXnet 1.0 Keys (turbo)' + s = "Serial w/VEXnet 1.0 Keys (turbo)" elif andeq(self.value, 0x04): - s = 'Serial w/VEXnet 2.0 Keys' + s = "Serial w/VEXnet 2.0 Keys" elif andeq(self.value, 0x05): - s = 'Serial w/VEXnet 2.0 Keys (download mode)' + s = "Serial w/VEXnet 2.0 Keys (download mode)" elif andeq(self.value, 
0x10): - s = 'Serial w/ a USB Cable' + s = "Serial w/ a USB Cable" elif andeq(self.value, 0x20): - s = 'Directly w/ a USB Cable' + s = "Directly w/ a USB Cable" else: - s = 'Unknown' + s = "Unknown" if andeq(self.value, self.FCS_CONNECT): - s += ' - FCS Connected' + s += " - FCS Connected" return s def get_connected_device(self) -> SystemDevice: - logger(__name__).info('Interrogating Cortex...') + logger(__name__).info("Interrogating Cortex...") stm32 = STM32Device(self.port, do_negoitate=False) try: stm32.get(n_retries=1) @@ -84,21 +84,21 @@ def get_connected_device(self) -> SystemDevice: return self def upload_project(self, project: Project, **kwargs): - assert project.target == 'cortex' + assert project.target == "cortex" output_path = project.path.joinpath(project.output) if not output_path.exists(): - raise ui.dont_send(Exception('No output files were found! Have you built your project?')) - with output_path.open(mode='rb') as pf: + raise ui.dont_send(Exception("No output files were found! Have you built your project?")) + with output_path.open(mode="rb") as pf: return self.write_program(pf, **kwargs) def write_program(self, file: typing.BinaryIO, **kwargs): - action_string = '' - if hasattr(file, 'name'): - action_string += f' {Path(file.name).name}' - action_string += f' to Cortex on {self.port}' - ui.echo(f'Uploading {action_string}') + action_string = "" + if hasattr(file, "name"): + action_string += f" {Path(file.name).name}" + action_string += f" to Cortex on {self.port}" + ui.echo(f"Uploading {action_string}") - logger(__name__).info('Writing program to Cortex') + logger(__name__).info("Writing program to Cortex") status = self.query_system() logger(__name__).info(status) if not status.flags | self.SystemStatusFlags.TETH_USB and not status.flags | self.SystemStatusFlags.DL_MODE: @@ -107,25 +107,25 @@ def write_program(self, file: typing.BinaryIO, **kwargs): bootloader = self.expose_bootloader() rv = bootloader.write_program(file, **kwargs) - ui.finalize('upload', f'Finished uploading {action_string}') + ui.finalize("upload", f"Finished uploading {action_string}") return rv @retries def query_system(self) -> SystemStatus: - logger(__name__).info('Querying system information') + logger(__name__).info("Querying system information") rx = self._txrx_simple_struct(0x21, "<8B2x") status = CortexDevice.SystemStatus(rx) - ui.finalize('cortex-status', status) + ui.finalize("cortex-status", status) return status @retries def send_to_download_channel(self): - logger(__name__).info('Sending to download channel') + logger(__name__).info("Sending to download channel") self._txrx_ack_packet(0x35, timeout=1.0) @retries def expose_bootloader(self): - logger(__name__).info('Exposing bootloader') + logger(__name__).info("Exposing bootloader") for _ in itertools.repeat(None, 5): self._tx_packet(0x25) time.sleep(0.1) @@ -153,4 +153,4 @@ def _txrx_ack_packet(self, command: int, timeout=0.1): """ tx = self._tx_packet(command) self._rx_ack(timeout=timeout) - logger(__name__).debug('TX: {}'.format(bytes_to_str(tx))) + logger(__name__).debug("TX: {}".format(bytes_to_str(tx))) diff --git a/pros/serial/devices/vex/message.py b/pros/serial/devices/vex/message.py index f3ede25c..24139ff7 100644 --- a/pros/serial/devices/vex/message.py +++ b/pros/serial/devices/vex/message.py @@ -33,4 +33,4 @@ def __setitem__(self, key, value): self.bookmarks[key] = value def __str__(self): - return 'TX:{}\tRX:{}'.format(bytes_to_str(self.tx), bytes_to_str(self.rx)) + return "TX:{}\tRX:{}".format(bytes_to_str(self.tx), 
bytes_to_str(self.rx)) diff --git a/pros/serial/devices/vex/stm32_device.py b/pros/serial/devices/vex/stm32_device.py index 36e2321e..0907ef0f 100644 --- a/pros/serial/devices/vex/stm32_device.py +++ b/pros/serial/devices/vex/stm32_device.py @@ -33,7 +33,7 @@ def __init__(self, port: BasePort, must_initialize: bool = False, do_negoitate: try: self.get(n_retries=0) except: - logger(__name__).info('Sending bootloader initialization') + logger(__name__).info("Sending bootloader initialization") time.sleep(0.01) self.port.rts = 0 for _ in itertools.repeat(None, times=3): @@ -47,13 +47,13 @@ def write_program(self, file: typing.BinaryIO, preserve_fs: bool = False, go_aft file.seek(0, 0) if file_len > (self.NUM_PAGES * self.PAGE_SIZE): raise VEXCommError( - f'File is too big to be uploaded (max file size: {self.NUM_PAGES * self.PAGE_SIZE} bytes)' + f"File is too big to be uploaded (max file size: {self.NUM_PAGES * self.PAGE_SIZE} bytes)" ) - if hasattr(file, 'name'): + if hasattr(file, "name"): display_name = file.name else: - display_name = '(memory)' + display_name = "(memory)" if not preserve_fs: self.erase_all() @@ -61,7 +61,7 @@ def write_program(self, file: typing.BinaryIO, preserve_fs: bool = False, go_aft self.erase_memory(list(range(0, int(file_len / self.PAGE_SIZE) + 1))) address = 0x08000000 - with ui.progressbar(length=file_len, label=f'Uploading {display_name}') as progress: + with ui.progressbar(length=file_len, label=f"Uploading {display_name}") as progress: for i in range(0, file_len, 256): write_size = 256 if i + 256 > file_len: @@ -78,77 +78,77 @@ def scan_prosfs(self): @retries def get(self): - logger(__name__).info('STM32: Get') + logger(__name__).info("STM32: Get") self._txrx_command(0x00) n_bytes = self.port.read(1)[0] assert n_bytes == 11 data = self.port.read(n_bytes + 1) - logger(__name__).info(f'STM32 Bootloader version 0x{data[0]:x}') + logger(__name__).info(f"STM32 Bootloader version 0x{data[0]:x}") self.commands = data[1:] - logger(__name__).debug(f'STM32 Bootloader commands are: {bytes_to_str(data[1:])}') + logger(__name__).debug(f"STM32 Bootloader commands are: {bytes_to_str(data[1:])}") assert self.port.read(1)[0] == self.ACK_BYTE @retries def get_read_protection_status(self): - logger(__name__).info('STM32: Get ID & Read Protection Status') + logger(__name__).info("STM32: Get ID & Read Protection Status") self._txrx_command(0x01) data = self.port.read(3) - logger(__name__).debug(f'STM32 Bootloader Get Version & Read Protection Status is: {bytes_to_str(data)}') + logger(__name__).debug(f"STM32 Bootloader Get Version & Read Protection Status is: {bytes_to_str(data)}") assert self.port.read(1)[0] == self.ACK_BYTE @retries def get_id(self): - logger(__name__).info('STM32: Get PID') + logger(__name__).info("STM32: Get PID") self._txrx_command(0x02) n_bytes = self.port.read(1)[0] pid = self.port.read(n_bytes + 1) - logger(__name__).debug(f'STM32 Bootloader PID is {pid}') + logger(__name__).debug(f"STM32 Bootloader PID is {pid}") @retries def read_memory(self, address: int, n_bytes: int): - logger(__name__).info(f'STM32: Read {n_bytes} fromo 0x{address:x}') + logger(__name__).info(f"STM32: Read {n_bytes} fromo 0x{address:x}") assert 255 >= n_bytes > 0 self._txrx_command(0x11) - self._txrx_command(struct.pack('>I', address)) + self._txrx_command(struct.pack(">I", address)) self._txrx_command(n_bytes) return self.port.read(n_bytes) @retries def go(self, start_address: int): - logger(__name__).info(f'STM32: Go 0x{start_address:x}') + logger(__name__).info(f"STM32: Go 
0x{start_address:x}") self._txrx_command(0x21) try: - self._txrx_command(struct.pack('>I', start_address), timeout=5.0) + self._txrx_command(struct.pack(">I", start_address), timeout=5.0) except VEXCommError: logger(__name__).warning( - 'STM32 Bootloader did not acknowledge GO command. ' - 'The program may take a moment to begin running ' - 'or the device should be rebooted.' + "STM32 Bootloader did not acknowledge GO command. " + "The program may take a moment to begin running " + "or the device should be rebooted." ) @retries def write_memory(self, start_address: int, data: bytes): - logger(__name__).info(f'STM32: Write {len(data)} to 0x{start_address:x}') + logger(__name__).info(f"STM32: Write {len(data)} to 0x{start_address:x}") assert 0 < len(data) <= 256 if len(data) % 4 != 0: - data = data + (b'\0' * (4 - (len(data) % 4))) + data = data + (b"\0" * (4 - (len(data) % 4))) self._txrx_command(0x31) - self._txrx_command(struct.pack('>I', start_address)) + self._txrx_command(struct.pack(">I", start_address)) self._txrx_command(bytes([len(data) - 1, *data])) @retries def erase_all(self): - logger(__name__).info('STM32: Erase all pages') + logger(__name__).info("STM32: Erase all pages") if not self.commands[6] == 0x43: - raise VEXCommError('Standard erase not supported on this device (only extended erase)') + raise VEXCommError("Standard erase not supported on this device (only extended erase)") self._txrx_command(0x43) self._txrx_command(0xFF) @retries def erase_memory(self, page_numbers: List[int]): - logger(__name__).info(f'STM32: Erase pages: {page_numbers}') + logger(__name__).info(f"STM32: Erase pages: {page_numbers}") if not self.commands[6] == 0x43: - raise VEXCommError('Standard erase not supported on this device (only extended erase)') + raise VEXCommError("Standard erase not supported on this device (only extended erase)") assert 0 < len(page_numbers) <= 255 assert all([0 <= p <= 255 for p in page_numbers]) self._txrx_command(0x43) @@ -156,22 +156,22 @@ def erase_memory(self, page_numbers: List[int]): @retries def extended_erase(self, page_numbers: List[int]): - logger(__name__).info(f'STM32: Extended Erase pages: {page_numbers}') + logger(__name__).info(f"STM32: Extended Erase pages: {page_numbers}") if not self.commands[6] == 0x44: - raise IOError('Extended erase not supported on this device (only standard erase)') + raise IOError("Extended erase not supported on this device (only standard erase)") assert 0 < len(page_numbers) < 0xFFF0 assert all([0 <= p <= 0xFFFF for p in page_numbers]) self._txrx_command(0x44) - self._txrx_command(bytes([len(page_numbers) - 1, *struct.pack(f'>{len(page_numbers)}H', *page_numbers)])) + self._txrx_command(bytes([len(page_numbers) - 1, *struct.pack(f">{len(page_numbers)}H", *page_numbers)])) @retries def extended_erase_special(self, command: int): - logger(__name__).info(f'STM32: Extended special erase: {command:x}') + logger(__name__).info(f"STM32: Extended special erase: {command:x}") if not self.commands[6] == 0x44: - raise IOError('Extended erase not supported on this device (only standard erase)') + raise IOError("Extended erase not supported on this device (only standard erase)") assert 0xFFFD <= command <= 0xFFFF self._txrx_command(0x44) - self._txrx_command(struct.pack('>H', command)) + self._txrx_command(struct.pack(">H", command)) def _txrx_command(self, command: Union[int, bytes], timeout: float = 0.01, checksum: bool = True): self.port.read_all() @@ -180,15 +180,15 @@ def _txrx_command(self, command: Union[int, bytes], timeout: float 
= 0.01, check elif isinstance(command, int): message = bytearray([command, ~command & 0xFF] if checksum else [command]) else: - raise ValueError(f'Expected command to be bytes or int but got {type(command)}') - logger(__name__).debug(f'STM32 TX: {bytes_to_str(message)}') + raise ValueError(f"Expected command to be bytes or int but got {type(command)}") + logger(__name__).debug(f"STM32 TX: {bytes_to_str(message)}") self.port.write(message) self.port.flush() start_time = time.time() while time.time() - start_time < timeout: data = self.port.read(1) if data and len(data) == 1: - logger(__name__).debug(f'STM32 RX: {data[0]} =?= {self.ACK_BYTE}') + logger(__name__).debug(f"STM32 RX: {data[0]} =?= {self.ACK_BYTE}") if data[0] == self.ACK_BYTE: return raise VEXCommError(f"Device never ACK'd to {command}", command) diff --git a/pros/serial/devices/vex/v5_device.py b/pros/serial/devices/vex/v5_device.py index c0ee039c..1054f24e 100644 --- a/pros/serial/devices/vex/v5_device.py +++ b/pros/serial/devices/vex/v5_device.py @@ -38,7 +38,7 @@ def filter_vex_ports(p): p.vid is not None and p.vid in [0x2888, 0x0501] or p.name is not None - and ('VEX' in p.name or 'V5' in p.name) + and ("VEX" in p.name or "V5" in p.name) ) def filter_v5_ports(p, locations, names): @@ -55,14 +55,14 @@ def filter_v5_ports_mac(p, device): # Initially try filtering based off of location or the name of the device. # Special logic for macOS - if platform.system() == 'Darwin': - user_ports = [p for p in ports if filter_v5_ports_mac(p, '3')] - system_ports = [p for p in ports if filter_v5_ports_mac(p, '1')] - joystick_ports = [p for p in ports if filter_v5_ports_mac(p, '2')] + if platform.system() == "Darwin": + user_ports = [p for p in ports if filter_v5_ports_mac(p, "3")] + system_ports = [p for p in ports if filter_v5_ports_mac(p, "1")] + joystick_ports = [p for p in ports if filter_v5_ports_mac(p, "2")] else: - user_ports = [p for p in ports if filter_v5_ports(p, ['2'], ['User'])] - system_ports = [p for p in ports if filter_v5_ports(p, ['0'], ['System', 'Communications'])] - joystick_ports = [p for p in ports if filter_v5_ports(p, ['1'], ['Controller'])] + user_ports = [p for p in ports if filter_v5_ports(p, ["2"], ["User"])] + system_ports = [p for p in ports if filter_v5_ports(p, ["0"], ["System", "Communications"])] + joystick_ports = [p for p in ports if filter_v5_ports(p, ["1"], ["Controller"])] # Fallback for when a brain port's location is not detected properly if len(user_ports) != len(system_ports): @@ -72,31 +72,31 @@ def filter_v5_ports_mac(p, device): user_ports = [p for p in ports if p not in system_ports and p not in joystick_ports] if len(user_ports) == len(system_ports) and len(user_ports) > 0: - if p_type.lower() == 'user': + if p_type.lower() == "user": return user_ports - elif p_type.lower() == 'system': + elif p_type.lower() == "system": return system_ports + joystick_ports else: - raise ValueError(f'Invalid port type specified: {p_type}') + raise ValueError(f"Invalid port type specified: {p_type}") # None of the typical filters worked, so if there are only two ports, then the lower one is always* # the USER? 
port (*always = I haven't found a guarantee) if len(ports) == 2: # natural sort based on: https://stackoverflow.com/a/16090640 def natural_key(chunk: str): - return [int(text) if text.isdigit() else text.lower() for text in re.split(r'(\d+)', chunk)] + return [int(text) if text.isdigit() else text.lower() for text in re.split(r"(\d+)", chunk)] ports = sorted(ports, key=lambda p: natural_key(p.device)) - if p_type.lower() == 'user': + if p_type.lower() == "user": return [ports[1]] - elif p_type.lower() == 'system': + elif p_type.lower() == "system": # check if ports contain the word Brain in the description and return that port for port in ports: if "Brain" in port.description: return [port] return [ports[0], *joystick_ports] else: - raise ValueError(f'Invalid port type specified: {p_type}') + raise ValueError(f"Invalid port type specified: {p_type}") # these can now also be used as user ports if len(joystick_ports) > 0: # and p_type.lower() == 'system': return joystick_ports @@ -116,10 +116,10 @@ def wrapped(device, *args, **kwargs): return wrapped -def compress_file(file: BinaryIO, file_len: int, label='Compressing binary') -> Tuple[BinaryIO, int]: +def compress_file(file: BinaryIO, file_len: int, label="Compressing binary") -> Tuple[BinaryIO, int]: buf = io.BytesIO() with ui.progressbar(length=file_len, label=label) as progress: - with gzip.GzipFile(fileobj=buf, mode='wb', mtime=0) as f: + with gzip.GzipFile(fileobj=buf, mode="wb", mtime=0) as f: while True: data = file.read(16 * 1024) if not data: @@ -133,8 +133,8 @@ def compress_file(file: BinaryIO, file_len: int, label='Compressing binary') -> class V5Device(VEXDevice, SystemDevice): - vid_map = {'user': 1, 'system': 15, 'rms': 16, 'pros': 24, 'mw': 32} # type: Dict[str, int] - channel_map = {'pit': 0, 'download': 1} # type: Dict[str, int] + vid_map = {"user": 1, "system": 15, "rms": 16, "pros": 24, "mw": 32} # type: Dict[str, int] + channel_map = {"pit": 0, "download": 1} # type: Dict[str, int] class FTCompleteOptions(IntEnum): DONT_RUN = 0 @@ -160,24 +160,24 @@ class ControllerFlags(IntFlag): def __init__(self, data: tuple): from semantic_version import Version - self.system_version = Version('{}.{}.{}-{}.{}'.format(*data[0:5])) + self.system_version = Version("{}.{}.{}-{}.{}".format(*data[0:5])) self.product = V5Device.SystemVersion.Product(data[5]) self.product_flags = self.flag_map[self.product](data[6]) def __str__(self): return ( - f'System Version: {self.system_version}\n' - f' Product: {self.product.name}\n' - f' Product Flags: {self.product_flags.value:x}' + f"System Version: {self.system_version}\n" + f" Product: {self.product.name}\n" + f" Product Flags: {self.product_flags.value:x}" ) class SystemStatus(object): def __init__(self, data: tuple): from semantic_version import Version - self.system_version = Version('{}.{}.{}-{}'.format(*data[0:4])) - self.cpu0_version = Version('{}.{}.{}-{}'.format(*data[4:8])) - self.cpu1_version = Version('{}.{}.{}-{}'.format(*data[8:12])) + self.system_version = Version("{}.{}.{}-{}".format(*data[0:4])) + self.cpu0_version = Version("{}.{}.{}-{}".format(*data[4:8])) + self.cpu1_version = Version("{}.{}.{}-{}".format(*data[8:12])) self.touch_version = data[12] self.system_id = data[13] @@ -186,11 +186,11 @@ def __getitem__(self, item): def __init__(self, port: BasePort): self._status = None - self._serial_cache = b'' + self._serial_cache = b"" super().__init__(port) class DownloadChannel(object): - def __init__(self, device: 'V5Device', timeout: float = 5.0): + def __init__(self, device: 
"V5Device", timeout: float = 5.0): self.device = device self.timeout = timeout self.did_switch = False @@ -200,11 +200,11 @@ def __enter__(self): if version.product == V5Device.SystemVersion.Product.CONTROLLER: self.device.default_timeout = 2.0 if V5Device.SystemVersion.ControllerFlags.CONNECTED not in version.product_flags: - raise VEXCommError('V5 Controller doesn\'t appear to be connected to a V5 Brain', version) - ui.echo('Transferring V5 to download channel') - self.device.ft_transfer_channel('download') + raise VEXCommError("V5 Controller doesn't appear to be connected to a V5 Brain", version) + ui.echo("Transferring V5 to download channel") + self.device.ft_transfer_channel("download") self.did_switch = True - logger(__name__).debug('Sleeping for a while to let V5 start channel transfer') + logger(__name__).debug("Sleeping for a while to let V5 start channel transfer") time.sleep(0.25) # wait at least 250ms before starting to poll controller if it's connected yet version = self.device.query_system_version() start_time = time.time() @@ -216,16 +216,16 @@ def __enter__(self): version = self.device.query_system_version() time.sleep(0.25) if V5Device.SystemVersion.ControllerFlags.CONNECTED not in version.product_flags: - raise VEXCommError('Could not transfer V5 Controller to download channel', version) - logger(__name__).info('V5 should been transferred to higher bandwidth download channel') + raise VEXCommError("Could not transfer V5 Controller to download channel", version) + logger(__name__).info("V5 should been transferred to higher bandwidth download channel") return self else: return self def __exit__(self, *exc): if self.did_switch: - self.device.ft_transfer_channel('pit') - ui.echo('V5 has been transferred back to pit channel') + self.device.ft_transfer_channel("pit") + ui.echo("V5 has been transferred back to pit channel") @property def status(self): @@ -235,7 +235,7 @@ def status(self): @property def can_compress(self): - return self.status['system_version'] in Spec('>=1.0.5') + return self.status["system_version"] in Spec(">=1.0.5") @property def is_wireless(self): @@ -251,73 +251,73 @@ def generate_cold_hash(self, project: Project, extra: dict): from base64 import b64encode from hashlib import md5 - msg = str(sorted(keys, key=lambda t: t[0])).encode('ascii') - name = b64encode(md5(msg).digest()).rstrip(b'=').decode('ascii') - if Spec('<=1.0.0-27').match(self.status['cpu0_version']): + msg = str(sorted(keys, key=lambda t: t[0])).encode("ascii") + name = b64encode(md5(msg).digest()).rstrip(b"=").decode("ascii") + if Spec("<=1.0.0-27").match(self.status["cpu0_version"]): # Bug prevents linked files from being > 18 characters long. # 17 characters is probably good enough for hash, so no need to fail out name = name[:17] return name def upload_project(self, project: Project, **kwargs): - assert project.target == 'v5' + assert project.target == "v5" monolith_path = project.location.joinpath(project.output) if monolith_path.exists(): - logger(__name__).debug(f'Monolith exists! ({monolith_path})') + logger(__name__).debug(f"Monolith exists! 
({monolith_path})") if ( - 'hot_output' in project.templates['kernel'].metadata - and 'cold_output' in project.templates['kernel'].metadata + "hot_output" in project.templates["kernel"].metadata + and "cold_output" in project.templates["kernel"].metadata ): - hot_path = project.location.joinpath(project.templates['kernel'].metadata['hot_output']) - cold_path = project.location.joinpath(project.templates['kernel'].metadata['cold_output']) + hot_path = project.location.joinpath(project.templates["kernel"].metadata["hot_output"]) + cold_path = project.location.joinpath(project.templates["kernel"].metadata["cold_output"]) upload_hot_cold = False if hot_path.exists() and cold_path.exists(): - logger(__name__).debug(f'Hot and cold files exist! ({hot_path}; {cold_path})') + logger(__name__).debug(f"Hot and cold files exist! ({hot_path}; {cold_path})") if monolith_path.exists(): monolith_mtime = monolith_path.stat().st_mtime hot_mtime = hot_path.stat().st_mtime - logger(__name__).debug(f'Monolith last modified: {monolith_mtime}') - logger(__name__).debug(f'Hot last modified: {hot_mtime}') + logger(__name__).debug(f"Monolith last modified: {monolith_mtime}") + logger(__name__).debug(f"Hot last modified: {hot_mtime}") if hot_mtime > monolith_mtime: upload_hot_cold = True - logger(__name__).debug('Hot file is newer than monolith!') + logger(__name__).debug("Hot file is newer than monolith!") else: upload_hot_cold = True if upload_hot_cold: - with hot_path.open(mode='rb') as hot: - with cold_path.open(mode='rb') as cold: - kwargs['linked_file'] = cold - kwargs['linked_remote_name'] = self.generate_cold_hash(project, {}) - kwargs['linked_file_addr'] = int( - project.templates['kernel'].metadata.get('cold_addr', 0x03800000) + with hot_path.open(mode="rb") as hot: + with cold_path.open(mode="rb") as cold: + kwargs["linked_file"] = cold + kwargs["linked_remote_name"] = self.generate_cold_hash(project, {}) + kwargs["linked_file_addr"] = int( + project.templates["kernel"].metadata.get("cold_addr", 0x03800000) ) - kwargs['addr'] = int(project.templates['kernel'].metadata.get('hot_addr', 0x07800000)) + kwargs["addr"] = int(project.templates["kernel"].metadata.get("hot_addr", 0x07800000)) return self.write_program(hot, **kwargs) if not monolith_path.exists(): - raise ui.dont_send(Exception('No output files were found! Have you built your project?')) - with monolith_path.open(mode='rb') as pf: + raise ui.dont_send(Exception("No output files were found! 
Have you built your project?")) + with monolith_path.open(mode="rb") as pf: return self.write_program(pf, **kwargs) def generate_ini_file(self, remote_name: str = None, slot: int = 0, ini: ConfigParser = None, **kwargs): project_ini = ConfigParser() - default_icon = 'USER902x.bmp' if Spec('>=1.0.0-22').match(self.status['cpu0_version']) else 'USER999x.bmp' - project_ini['project'] = { - 'version': str(kwargs.get('ide_version') or get_version()), - 'ide': str(kwargs.get('ide') or 'PROS'), + default_icon = "USER902x.bmp" if Spec(">=1.0.0-22").match(self.status["cpu0_version"]) else "USER999x.bmp" + project_ini["project"] = { + "version": str(kwargs.get("ide_version") or get_version()), + "ide": str(kwargs.get("ide") or "PROS"), } - project_ini['program'] = { - 'version': kwargs.get('version', '0.0.0') or '0.0.0', - 'name': remote_name, - 'slot': slot, - 'icon': kwargs.get('icon', default_icon) or default_icon, - 'description': kwargs.get('description', 'Created with PROS'), - 'date': datetime.now().isoformat(), + project_ini["program"] = { + "version": kwargs.get("version", "0.0.0") or "0.0.0", + "name": remote_name, + "slot": slot, + "icon": kwargs.get("icon", default_icon) or default_icon, + "description": kwargs.get("description", "Created with PROS"), + "date": datetime.now().isoformat(), } if ini: project_ini.update(ini) with StringIO() as ini_str: project_ini.write(ini_str) - logger(__name__).info(f'Created ini: {ini_str.getvalue()}') + logger(__name__).info(f"Created ini: {ini_str.getvalue()}") return ini_str.getvalue() @with_download_channel @@ -329,7 +329,7 @@ def write_program( slot: int = 0, file_len: int = -1, run_after: FTCompleteOptions = FTCompleteOptions.DONT_RUN, - target: str = 'flash', + target: str = "flash", quirk: int = 0, linked_file: Optional[typing.BinaryIO] = None, linked_remote_name: Optional[str] = None, @@ -340,21 +340,21 @@ def write_program( with ui.Notification(): action_string = f'Uploading program "{remote_name}"' finish_string = f'Finished uploading "{remote_name}"' - if hasattr(file, 'name'): - action_string += f' ({remote_name if remote_name else Path(file.name).name})' - finish_string += f' ({remote_name if remote_name else Path(file.name).name})' - action_string += f' to V5 slot {slot + 1} on {self.port}' + if hasattr(file, "name"): + action_string += f" ({remote_name if remote_name else Path(file.name).name})" + finish_string += f" ({remote_name if remote_name else Path(file.name).name})" + action_string += f" to V5 slot {slot + 1} on {self.port}" if compress_bin: - action_string += ' (compressed)' + action_string += " (compressed)" ui.echo(action_string) - remote_base = f'slot_{slot + 1}' - if target == 'ddr': + remote_base = f"slot_{slot + 1}" + if target == "ddr": self.write_file( file, - f'{remote_base}.bin', + f"{remote_base}.bin", file_len=file_len, - type='bin', - target='ddr', + type="bin", + target="ddr", run_after=run_after, linked_filename=linked_remote_name, **kwargs, @@ -365,11 +365,11 @@ def write_program( if not remote_name: remote_name = file.name if len(remote_name) > 23: - logger(__name__).info('Truncating remote name to {} for length.'.format(remote_name[:20])) + logger(__name__).info("Truncating remote name to {} for length.".format(remote_name[:20])) remote_name = remote_name[:23] ini_file = self.generate_ini_file(remote_name=remote_name, slot=slot, ini=ini, **kwargs) - logger(__name__).info(f'Created ini: {ini_file}') + logger(__name__).info(f"Created ini: {ini_file}") if linked_file is not None: self.upload_library( @@ -377,37 
+377,37 @@ def write_program( remote_name=linked_remote_name, addr=linked_file_addr, compress=compress_bin, - force_upload=kwargs.pop('force_upload_linked', False), + force_upload=kwargs.pop("force_upload_linked", False), ) - bin_kwargs = {k: v for k, v in kwargs.items() if v in ['addr']} + bin_kwargs = {k: v for k, v in kwargs.items() if v in ["addr"]} if (quirk & 0xFF) == 1: # WRITE BIN FILE self.write_file( file, - f'{remote_base}.bin', + f"{remote_base}.bin", file_len=file_len, - type='bin', + type="bin", run_after=run_after, linked_filename=linked_remote_name, compress=compress_bin, **bin_kwargs, **kwargs, ) - with BytesIO(ini_file.encode(encoding='ascii')) as ini_bin: + with BytesIO(ini_file.encode(encoding="ascii")) as ini_bin: # WRITE INI FILE - self.write_file(ini_bin, f'{remote_base}.ini', type='ini', **kwargs) + self.write_file(ini_bin, f"{remote_base}.ini", type="ini", **kwargs) elif (quirk & 0xFF) == 0: # STOP PROGRAM - self.execute_program_file('', run=False) - with BytesIO(ini_file.encode(encoding='ascii')) as ini_bin: + self.execute_program_file("", run=False) + with BytesIO(ini_file.encode(encoding="ascii")) as ini_bin: # WRITE INI FILE - self.write_file(ini_bin, f'{remote_base}.ini', type='ini', **kwargs) + self.write_file(ini_bin, f"{remote_base}.ini", type="ini", **kwargs) # WRITE BIN FILE self.write_file( file, - f'{remote_base}.bin', + f"{remote_base}.bin", file_len=file_len, - type='bin', + type="bin", run_after=run_after, linked_filename=linked_remote_name, compress=compress_bin, @@ -415,8 +415,8 @@ def write_program( **kwargs, ) else: - raise ValueError(f'Unknown quirk option: {quirk}') - ui.finalize('upload', f'{finish_string} to V5') + raise ValueError(f"Unknown quirk option: {quirk}") + ui.finalize("upload", f"{finish_string} to V5") def ensure_library_space(self, name: Optional[str] = None, vid: int_str = None, target_name: Optional[str] = None): """ @@ -438,7 +438,7 @@ def ensure_library_space(self, name: Optional[str] = None, vid: int_str = None, vid = self.vid_map[vid.lower()] # assume all libraries unused_libraries = [ - (vid, l['filename']) + (vid, l["filename"]) for l in [self.get_file_metadata_by_idx(i) for i in range(0, self.get_dir_count(vid=vid))] ] if name is not None: @@ -451,50 +451,50 @@ def ensure_library_space(self, name: Optional[str] = None, vid: int_str = None, programs: Dict[str, Dict] = { # need the linked file metadata, so we have to use the get_file_metadata_by_name command - p['filename']: self.get_file_metadata_by_name(p['filename'], vid='user') - for p in [self.get_file_metadata_by_idx(i) for i in range(0, self.get_dir_count(vid='user'))] - if p['type'] == 'bin' + p["filename"]: self.get_file_metadata_by_name(p["filename"], vid="user") + for p in [self.get_file_metadata_by_idx(i) for i in range(0, self.get_dir_count(vid="user"))] + if p["type"] == "bin" } library_usage: Dict[Tuple[int, str], List[str]] = defaultdict(list) for program_name, metadata in programs.items(): - library_usage[(metadata['linked_vid'], metadata['linked_filename'])].append(program_name) + library_usage[(metadata["linked_vid"], metadata["linked_filename"])].append(program_name) orphaned_files: List[Union[str, Tuple[int, str]]] = [] for link, program_names in library_usage.items(): linked_vid, linked_name = link if name is not None and linked_vid == vid and linked_name == name: - logger(__name__).debug(f'{program_names} will be removed because the library will be replaced') + logger(__name__).debug(f"{program_names} will be removed because the library will be 
replaced") orphaned_files.extend(program_names) elif linked_vid != 0: # linked_vid == 0 means there's no link. Can't be orphaned if there's no link if link in unused_libraries: # the library is being used - logger(__name__).debug(f'{link} is being used') + logger(__name__).debug(f"{link} is being used") unused_libraries.remove(link) used_libraries.append(link) else: try: self.get_file_metadata_by_name(linked_name, vid=linked_vid) - logger(__name__).debug(f'{link} exists') + logger(__name__).debug(f"{link} exists") used_libraries.extend(link) except VEXCommError as e: logger(__name__).debug(dont_send(e)) - logger(__name__).debug(f'{program_names} will be removed because {link} does not exist') + logger(__name__).debug(f"{program_names} will be removed because {link} does not exist") orphaned_files.extend(program_names) orphaned_files.extend(unused_libraries) if target_name is not None and target_name in orphaned_files: # the file will be overwritten anyway orphaned_files.remove(target_name) if len(orphaned_files) > 0: - logger(__name__).warning(f'Removing {len(orphaned_files)} orphaned file(s) ({orphaned_files})') + logger(__name__).warning(f"Removing {len(orphaned_files)} orphaned file(s) ({orphaned_files})") for file in orphaned_files: if isinstance(file, tuple): self.erase_file(file_name=file[1], vid=file[0]) else: - self.erase_file(file_name=file, erase_all=True, vid='user') + self.erase_file(file_name=file, erase_all=True, vid="user") if len(used_libraries) > 3: libraries = [ - (linked_vid, linked_name, self.get_file_metadata_by_name(linked_name, vid=linked_vid)['timestamp']) + (linked_vid, linked_name, self.get_file_metadata_by_name(linked_name, vid=linked_vid)["timestamp"]) for linked_vid, linked_name in used_libraries ] library_usage_timestamps = sorted( @@ -505,7 +505,7 @@ def ensure_library_space(self, name: Optional[str] = None, vid: int_str = None, # get the most recent timestamp of the library and all files linking to it max( linked_timestamp, - *[programs[p]['timestamp'] for p in library_usage[(linked_vid, linked_name)]], + *[programs[p]["timestamp"] for p in library_usage[(linked_vid, linked_name)]], ), ) for linked_vid, linked_name, linked_timestamp in libraries @@ -513,31 +513,31 @@ def ensure_library_space(self, name: Optional[str] = None, vid: int_str = None, key=lambda t: t[2], ) evicted_files: List[Union[str, Tuple[int, str]]] = [] - evicted_file_list = '' + evicted_file_list = "" for evicted_library in library_usage_timestamps[:3]: evicted_files.append(evicted_library[0:2]) evicted_files.extend(library_usage[evicted_library[0:2]]) - evicted_file_list += evicted_library[1] + ', ' - evicted_file_list += ', '.join(library_usage[evicted_file_list[0:2]]) + evicted_file_list += evicted_library[1] + ", " + evicted_file_list += ", ".join(library_usage[evicted_file_list[0:2]]) evicted_file_list = evicted_file_list[:2] # remove last ", " assert len(evicted_files) > 0 if confirm( - f'There are too many files on the V5. PROS can remove the following suggested old files: ' - f'{evicted_file_list}', - title='Confirm file eviction plan:', + f"There are too many files on the V5. 
PROS can remove the following suggested old files: " + f"{evicted_file_list}", + title="Confirm file eviction plan:", ): for file in evicted_files: if isinstance(file, tuple): self.erase_file(file_name=file[1], vid=file[0]) else: - self.erase_file(file_name=file, erase_all=True, vid='user') + self.erase_file(file_name=file, erase_all=True, vid="user") def upload_library( self, file: typing.BinaryIO, remote_name: str = None, file_len: int = -1, - vid: int_str = 'pros', + vid: int_str = "pros", force_upload: bool = False, compress: bool = True, **kwargs, @@ -551,7 +551,7 @@ def upload_library( if not remote_name: remote_name = file.name if len(remote_name) > 23: - logger(__name__).info('Truncating remote name to {} for length.'.format(remote_name[:23])) + logger(__name__).info("Truncating remote name to {} for length.".format(remote_name[:23])) remote_name = remote_name[:23] if file_len < 0: @@ -559,7 +559,7 @@ def upload_library( file.seek(0, 0) if compress and self.can_compress: - file, file_len = compress_file(file, file_len, label='Compressing library') + file, file_len = compress_file(file, file_len, label="Compressing library") crc32 = self.VEX_CRC32.compute(file.read(file_len)) file.seek(0, 0) @@ -568,22 +568,22 @@ def upload_library( try: response = self.get_file_metadata_by_name(remote_name, vid) logger(__name__).debug(response) - logger(__name__).debug({'file len': file_len, 'crc': crc32}) - if response['size'] == file_len and response['crc'] == crc32: - ui.echo('Library is already onboard V5') + logger(__name__).debug({"file len": file_len, "crc": crc32}) + if response["size"] == file_len and response["crc"] == crc32: + ui.echo("Library is already onboard V5") return else: logger(__name__).warning( - f'Library onboard doesn\'t match! ' + f"Library onboard doesn't match! " f'Length was {response["size"]} but expected {file_len} ' f'CRC: was {response["crc"]:x} but expected {crc32:x}' ) except VEXCommError as e: logger(__name__).debug(e) else: - logger(__name__).info('Skipping already-uploaded checks') + logger(__name__).info("Skipping already-uploaded checks") - logger(__name__).debug('Going to worry about uploading the file now') + logger(__name__).debug("Going to worry about uploading the file now") self.ensure_library_space( remote_name, vid, @@ -594,8 +594,8 @@ def read_file( self, file: typing.IO[bytes], remote_file: str, - vid: int_str = 'user', - target: int_str = 'flash', + vid: int_str = "user", + target: int_str = "flash", addr: Optional[int] = None, file_len: Optional[int] = None, ): @@ -603,29 +603,29 @@ def read_file( vid = self.vid_map[vid.lower()] if addr is None: metadata = self.get_file_metadata_by_name(remote_file, vid=vid) - addr = metadata['addr'] + addr = metadata["addr"] wireless = self.is_wireless - ft_meta = self.ft_initialize(remote_file, function='download', vid=vid, target=target, addr=addr) + ft_meta = self.ft_initialize(remote_file, function="download", vid=vid, target=target, addr=addr) if file_len is None: - file_len = ft_meta['file_size'] + file_len = ft_meta["file_size"] if wireless and file_len > 0x25000: confirm( - f'You\'re about to download {file_len} bytes wirelessly. This could take some time, and you should ' - f'consider downloading directly with a wire.', + f"You're about to download {file_len} bytes wirelessly. 
This could take some time, and you should " + f"consider downloading directly with a wire.", abort=True, default=False, ) - max_packet_size = ft_meta['max_packet_size'] - with ui.progressbar(length=file_len, label='Downloading {}'.format(remote_file)) as progress: + max_packet_size = ft_meta["max_packet_size"] + with ui.progressbar(length=file_len, label="Downloading {}".format(remote_file)) as progress: for i in range(0, file_len, max_packet_size): packet_size = max_packet_size if i + max_packet_size > file_len: packet_size = file_len - i file.write(self.ft_read(addr + i, packet_size)) progress.update(packet_size) - logger(__name__).debug('Completed {} of {} bytes'.format(i + packet_size, file_len)) + logger(__name__).debug("Completed {} of {} bytes".format(i + packet_size, file_len)) self.ft_complete() def write_file( @@ -635,7 +635,7 @@ def write_file( file_len: int = -1, run_after: FTCompleteOptions = FTCompleteOptions.DONT_RUN, linked_filename: Optional[str] = None, - linked_vid: int_str = 'pros', + linked_vid: int_str = "pros", compress: bool = False, **kwargs, ): @@ -643,43 +643,43 @@ def write_file( file_len = file.seek(0, 2) file.seek(0, 0) display_name = remote_file - if hasattr(file, 'name'): - display_name = f'{remote_file} ({Path(file.name).name})' + if hasattr(file, "name"): + display_name = f"{remote_file} ({Path(file.name).name})" if compress and self.can_compress: file, file_len = compress_file(file, file_len) if self.is_wireless and file_len > 0x25000: confirm( - f'You\'re about to upload {file_len} bytes wirelessly. This could take some time, and you should ' - f'consider uploading directly with a wire.', + f"You're about to upload {file_len} bytes wirelessly. This could take some time, and you should " + f"consider uploading directly with a wire.", abort=True, default=False, ) crc32 = self.VEX_CRC32.compute(file.read(file_len)) file.seek(0, 0) - addr = kwargs.get('addr', 0x03800000) - logger(__name__).info('Transferring {} ({} bytes) to the V5 from {}'.format(remote_file, file_len, file)) - ft_meta = self.ft_initialize(remote_file, function='upload', length=file_len, crc=crc32, **kwargs) + addr = kwargs.get("addr", 0x03800000) + logger(__name__).info("Transferring {} ({} bytes) to the V5 from {}".format(remote_file, file_len, file)) + ft_meta = self.ft_initialize(remote_file, function="upload", length=file_len, crc=crc32, **kwargs) if linked_filename is not None: - logger(__name__).debug('Setting file link') + logger(__name__).debug("Setting file link") self.ft_set_link(linked_filename, vid=linked_vid) - assert ft_meta['file_size'] >= file_len + assert ft_meta["file_size"] >= file_len if len(remote_file) > 24: - logger(__name__).info('Truncating {} to {} due to length'.format(remote_file, remote_file[:24])) + logger(__name__).info("Truncating {} to {} due to length".format(remote_file, remote_file[:24])) remote_file = remote_file[:24] - max_packet_size = int(ft_meta['max_packet_size'] / 2) - with ui.progressbar(length=file_len, label='Uploading {}'.format(display_name)) as progress: + max_packet_size = int(ft_meta["max_packet_size"] / 2) + with ui.progressbar(length=file_len, label="Uploading {}".format(display_name)) as progress: for i in range(0, file_len, max_packet_size): packet_size = max_packet_size if i + max_packet_size > file_len: packet_size = file_len - i - logger(__name__).debug('Writing {} bytes at 0x{:02X}'.format(packet_size, addr + i)) + logger(__name__).debug("Writing {} bytes at 0x{:02X}".format(packet_size, addr + i)) self.ft_write(addr + i, 
file.read(packet_size)) progress.update(packet_size) - logger(__name__).debug('Completed {} of {} bytes'.format(i + packet_size, file_len)) - logger(__name__).debug('Data transfer complete, sending ft complete') - if compress and self.status['system_version'] in Spec('>=1.0.5'): - logger(__name__).info('Closing gzip file') + logger(__name__).debug("Completed {} of {} bytes".format(i + packet_size, file_len)) + logger(__name__).debug("Data transfer complete, sending ft complete") + if compress and self.status["system_version"] in Spec(">=1.0.5"): + logger(__name__).info("Closing gzip file") file.close() self.ft_complete(options=run_after) @@ -690,9 +690,9 @@ def capture_screen(self) -> Tuple[List[List[int]], int, int]: file_size = width * height * 4 # ARGB rx_io = BytesIO() - self.read_file(rx_io, '', vid='system', target='screen', addr=0, file_len=file_size) + self.read_file(rx_io, "", vid="system", target="screen", addr=0, file_len=file_size) rx = rx_io.getvalue() - rx = struct.unpack('<{}I'.format(len(rx) // 4), rx) + rx = struct.unpack("<{}I".format(len(rx) // 4), rx) data = [[] for _ in range(height)] for y in range(height): @@ -709,8 +709,8 @@ def used_slots(self) -> Dict[int, Optional[str]]: with ui.Notification(): rv = {} for slot in range(1, 9): - ini = self.read_ini(f'slot_{slot}.ini') - rv[slot] = ini['program']['name'] if ini is not None else None + ini = self.read_ini(f"slot_{slot}.ini") + rv[slot] = ini["program"]["name"] if ini is not None else None return rv def read_ini(self, remote_name: str) -> Optional[ConfigParser]: @@ -719,96 +719,96 @@ def read_ini(self, remote_name: str) -> Optional[ConfigParser]: self.read_file(rx_io, remote_name) config = ConfigParser() rx_io.seek(0, 0) - config.read_string(rx_io.read().decode('ascii')) + config.read_string(rx_io.read().decode("ascii")) return config except VEXCommError: return None @retries def query_system_version(self) -> SystemVersion: - logger(__name__).debug('Sending simple 0xA408 command') - ret = self._txrx_simple_struct(0xA4, '>8B') - logger(__name__).debug('Completed simple 0xA408 command') + logger(__name__).debug("Sending simple 0xA408 command") + ret = self._txrx_simple_struct(0xA4, ">8B") + logger(__name__).debug("Completed simple 0xA408 command") return V5Device.SystemVersion(ret) @retries def ft_transfer_channel(self, channel: int_str): - logger(__name__).debug(f'Transferring to {channel} channel') - logger(__name__).debug('Sending ext 0x10 command') + logger(__name__).debug(f"Transferring to {channel} channel") + logger(__name__).debug("Sending ext 0x10 command") if isinstance(channel, str): channel = self.channel_map[channel] assert isinstance(channel, int) and 0 <= channel <= 1 - self._txrx_ext_packet(0x10, struct.pack('<2B', 1, channel), rx_length=0) - logger(__name__).debug('Completed ext 0x10 command') + self._txrx_ext_packet(0x10, struct.pack("<2B", 1, channel), rx_length=0) + logger(__name__).debug("Completed ext 0x10 command") @retries def ft_initialize(self, file_name: str, **kwargs) -> Dict[str, Any]: - logger(__name__).debug('Sending ext 0x11 command') + logger(__name__).debug("Sending ext 0x11 command") options = { - 'function': 'upload', - 'target': 'flash', - 'vid': 'user', - 'overwrite': True, - 'options': 0, - 'length': 0, - 'addr': 0x03800000, - 'crc': 0, - 'type': 'bin', - 'timestamp': datetime.now(), - 'version': 0x01_00_00_00, - 'name': file_name, + "function": "upload", + "target": "flash", + "vid": "user", + "overwrite": True, + "options": 0, + "length": 0, + "addr": 0x03800000, + "crc": 0, + 
"type": "bin", + "timestamp": datetime.now(), + "version": 0x01_00_00_00, + "name": file_name, } options.update({k: v for k, v in kwargs.items() if k in options and v is not None}) - if isinstance(options['function'], str): - options['function'] = {'upload': 1, 'download': 2}[options['function'].lower()] - if isinstance(options['target'], str): - options['target'] = {'ddr': 0, 'flash': 1, 'screen': 2}[options['target'].lower()] - if isinstance(options['vid'], str): - options['vid'] = self.vid_map[options['vid'].lower()] - if isinstance(options['type'], str): - options['type'] = options['type'].encode(encoding='ascii') - if isinstance(options['name'], str): - options['name'] = options['name'].encode(encoding='ascii') - options['options'] |= 1 if options['overwrite'] else 0 - options['timestamp'] = int((options['timestamp'] - datetime(2000, 1, 1)).total_seconds()) - - logger(__name__).debug('Initializing file transfer w/: {}'.format(options)) + if isinstance(options["function"], str): + options["function"] = {"upload": 1, "download": 2}[options["function"].lower()] + if isinstance(options["target"], str): + options["target"] = {"ddr": 0, "flash": 1, "screen": 2}[options["target"].lower()] + if isinstance(options["vid"], str): + options["vid"] = self.vid_map[options["vid"].lower()] + if isinstance(options["type"], str): + options["type"] = options["type"].encode(encoding="ascii") + if isinstance(options["name"], str): + options["name"] = options["name"].encode(encoding="ascii") + options["options"] |= 1 if options["overwrite"] else 0 + options["timestamp"] = int((options["timestamp"] - datetime(2000, 1, 1)).total_seconds()) + + logger(__name__).debug("Initializing file transfer w/: {}".format(options)) tx_payload = struct.pack( "<4B3I4s2I24s", - options['function'], - options['target'], - options['vid'], - options['options'], - options['length'], - options['addr'], - options['crc'], - options['type'], - options['timestamp'], - options['version'], - options['name'], + options["function"], + options["target"], + options["vid"], + options["options"], + options["length"], + options["addr"], + options["crc"], + options["type"], + options["timestamp"], + options["version"], + options["name"], ) - rx = self._txrx_ext_struct(0x11, tx_payload, " bytearray: - logger(__name__).debug('Sending ext 0x14 command') + logger(__name__).debug("Sending ext 0x14 command") actual_n_bytes = n_bytes + (0 if n_bytes % 4 == 0 else 4 - n_bytes % 4) ui.logger(__name__).debug(dict(actual_n_bytes=actual_n_bytes, addr=addr)) tx_payload = struct.pack(" int: - logger(__name__).debug('Sending ext 0x16 command') + logger(__name__).debug("Sending ext 0x16 command") if isinstance(vid, str): vid = self.vid_map[vid.lower()] tx_payload = struct.pack("<2B", vid, options) ret = self._txrx_ext_struct(0x16, tx_payload, " Dict[str, Any]: - logger(__name__).debug('Sending ext 0x17 command') + logger(__name__).debug("Sending ext 0x17 command") tx_payload = struct.pack("<2B", file_idx, options) rx = self._txrx_ext_struct(0x17, tx_payload, " Dict[str, Any]: - logger(__name__).debug('Sending ext 0x19 command') + logger(__name__).debug("Sending ext 0x19 command") if isinstance(vid, str): vid = self.vid_map[vid.lower()] - ui.logger(__name__).debug(f'Options: {dict(vid=vid, file_name=file_name)}') - tx_payload = struct.pack("<2B24s", vid, options, file_name.encode(encoding='ascii')) + ui.logger(__name__).debug(f"Options: {dict(vid=vid, file_name=file_name)}") + tx_payload = struct.pack("<2B24s", vid, options, file_name.encode(encoding="ascii")) 
rx = self._txrx_ext_struct(0x19, tx_payload, " Dict[str, Any]: - logger(__name__).debug('Sending ext 0x1C command') - tx_payload = struct.pack("<2B24s", vid, options, file_name.encode(encoding='ascii')) + logger(__name__).debug("Sending ext 0x1C command") + tx_payload = struct.pack("<2B24s", vid, options, file_name.encode(encoding="ascii")) ret = self._txrx_ext_struct(0x1C, tx_payload, " SystemStatus: from semantic_version import Version - logger(__name__).debug('Sending ext 0x22 command') + logger(__name__).debug("Sending ext 0x22 command") version = self.query_system_version() - if (version.product == V5Device.SystemVersion.Product.BRAIN and version.system_version in Spec('<1.0.13')) or ( + if (version.product == V5Device.SystemVersion.Product.BRAIN and version.system_version in Spec("<1.0.13")) or ( version.product == V5Device.SystemVersion.Product.CONTROLLER - and version.system_version in Spec('<1.0.0-0.70') + and version.system_version in Spec("<1.0.0-0.70") ): - schema = ' bytes: # read/write are the same command, behavior dictated by specifying # length-to-read as 0xFF and providing additional payload bytes to write or # specifying a length-to-read and no additional data to read. - logger(__name__).debug('Sending ext 0x27 command (read)') + logger(__name__).debug("Sending ext 0x27 command (read)") # specifying a length to read (0x40 bytes) with no additional payload data. - tx_payload = struct.pack("<2B", self.channel_map['download'], 0x40) + tx_payload = struct.pack("<2B", self.channel_map["download"], 0x40) # RX length isn't always 0x40 (end of buffer reached), so don't check_length. self._serial_cache += self._txrx_ext_packet(0x27, tx_payload, 0, check_length=False)[1:] - logger(__name__).debug('Completed ext 0x27 command (read)') + logger(__name__).debug("Completed ext 0x27 command (read)") # if _serial_cache doesn't have a \x00, pretend we didn't read anything. - if b'\x00' not in self._serial_cache: - return b'' + if b"\x00" not in self._serial_cache: + return b"" # _serial_cache has a \x00, split off the beginning part and hand it down. - parts = self._serial_cache.split(b'\x00') - ret = parts[0] + b'\x00' - self._serial_cache = b'\x00'.join(parts[1:]) + parts = self._serial_cache.split(b"\x00") + ret = parts[0] + b"\x00" + self._serial_cache = b"\x00".join(parts[1:]) return ret @@ -997,16 +997,16 @@ def user_fifo_read(self) -> bytes: def user_fifo_write(self, payload: Union[Iterable, bytes, bytearray, str]): # Not currently implemented return - logger(__name__).debug('Sending ext 0x27 command (write)') + logger(__name__).debug("Sending ext 0x27 command (write)") max_packet_size = 224 pl_len = len(payload) for i in range(0, pl_len, max_packet_size): packet_size = max_packet_size if i + max_packet_size > pl_len: packet_size = pl_len - i - logger(__name__).debug(f'Writing {packet_size} bytes to user FIFO') - self._txrx_ext_packet(0x27, b'\x01\x00' + payload[i:packet_size], 0, check_length=False)[1:] - logger(__name__).debug('Completed ext 0x27 command (write)') + logger(__name__).debug(f"Writing {packet_size} bytes to user FIFO") + self._txrx_ext_packet(0x27, b"\x01\x00" + payload[i:packet_size], 0, check_length=False)[1:] + logger(__name__).debug("Completed ext 0x27 command (write)") @retries def sc_init(self) -> None: @@ -1014,35 +1014,35 @@ def sc_init(self) -> None: Send command to initialize screen capture """ # This will only copy data in memory, not send! 
- logger(__name__).debug('Sending ext 0x28 command') - self._txrx_ext_struct(0x28, [], '') - logger(__name__).debug('Completed ext 0x28 command') + logger(__name__).debug("Sending ext 0x28 command") + self._txrx_ext_struct(0x28, [], "") + logger(__name__).debug("Completed ext 0x28 command") @retries def kv_read(self, kv: str) -> bytearray: - logger(__name__).debug('Sending ext 0x2e command') - encoded_kv = f'{kv}\0'.encode(encoding='ascii') - tx_payload = struct.pack(f'<{len(encoded_kv)}s', encoded_kv) + logger(__name__).debug("Sending ext 0x2e command") + encoded_kv = f"{kv}\0".encode(encoding="ascii") + tx_payload = struct.pack(f"<{len(encoded_kv)}s", encoded_kv) # Because the length of the kernel variables is not known, use None to indicate we are recieving an unknown length. ret = self._txrx_ext_packet(0x2E, tx_payload, 1, check_length=False, check_ack=True) - logger(__name__).debug('Completed ext 0x2e command') + logger(__name__).debug("Completed ext 0x2e command") return ret @retries def kv_write(self, kv: str, payload: Union[Iterable, bytes, bytearray, str]): - logger(__name__).debug('Sending ext 0x2f command') - encoded_kv = f'{kv}\0'.encode(encoding='ascii') - kv_to_max_bytes = {'teamnumber': 7, 'robotname': 16} + logger(__name__).debug("Sending ext 0x2f command") + encoded_kv = f"{kv}\0".encode(encoding="ascii") + kv_to_max_bytes = {"teamnumber": 7, "robotname": 16} if len(payload) > kv_to_max_bytes.get(kv, 254): - print(f'Truncating input to meet maximum value length ({kv_to_max_bytes[kv]} characters).') + print(f"Truncating input to meet maximum value length ({kv_to_max_bytes[kv]} characters).") # Trim down size of payload to fit within the 255 byte limit and add null terminator. payload = payload[: kv_to_max_bytes.get(kv, 254)] + "\0" if isinstance(payload, str): - payload = payload.encode(encoding='ascii') - tx_fmt = f'<{len(encoded_kv)}s{len(payload)}s' + payload = payload.encode(encoding="ascii") + tx_fmt = f"<{len(encoded_kv)}s{len(payload)}s" tx_payload = struct.pack(tx_fmt, encoded_kv, payload) self._txrx_ext_packet(0x2F, tx_payload, 1, check_length=False, check_ack=True) - logger(__name__).debug('Completed ext 0x2f command') + logger(__name__).debug("Completed ext 0x2f command") return payload def _txrx_ext_struct( @@ -1073,7 +1073,7 @@ def _txrx_ext_struct( check_ack=check_ack, timeout=timeout, ) - logger(__name__).debug('Unpacking with format: {}'.format(unpack_fmt)) + logger(__name__).debug("Unpacking with format: {}".format(unpack_fmt)) return struct.unpack(unpack_fmt, rx) @classmethod @@ -1089,11 +1089,11 @@ def _rx_ext_packet( :param tx_payload: what was sent, used if an exception needs to be thrown :return: The payload of the extended message """ - assert msg['command'] == 0x56 + assert msg["command"] == 0x56 if not cls.VEX_CRC16.compute(msg.rx) == 0: raise VEXCommError("CRC of message didn't match 0: {}".format(cls.VEX_CRC16.compute(msg.rx)), msg) - assert msg['payload'][0] == command - msg = msg['payload'][1:-2] + assert msg["payload"][0] == command + msg = msg["payload"][1:-2] if check_ack: nacks = { 0xFF: "General NACK", @@ -1117,12 +1117,12 @@ def _rx_ext_packet( raise VEXCommError("Device didn't ACK", msg) msg = msg[1:] if len(msg) > 0: - logger(cls).debug('Set msg window to {}'.format(bytes_to_str(msg))) + logger(cls).debug("Set msg window to {}".format(bytes_to_str(msg))) if len(msg) < rx_length and check_length: - raise VEXCommError(f'Received length is less than {rx_length} (got {len(msg)}).', msg) + raise VEXCommError(f"Received length is less than 
{rx_length} (got {len(msg)}).", msg) elif len(msg) > rx_length and check_length: ui.echo( - f'WARNING: Recieved length is more than {rx_length} (got {len(msg)}). Consider upgrading the PROS (CLI Version: {get_version()}).' + f"WARNING: Recieved length is more than {rx_length} (got {len(msg)}). Consider upgrading the PROS (CLI Version: {get_version()})." ) return msg diff --git a/pros/serial/devices/vex/v5_user_device.py b/pros/serial/devices/vex/v5_user_device.py index 95ce2fc6..f6d88c9e 100644 --- a/pros/serial/devices/vex/v5_user_device.py +++ b/pros/serial/devices/vex/v5_user_device.py @@ -30,21 +30,21 @@ def promiscuous(self, value: bool): def write(self, data: Union[str, bytes]): if isinstance(data, str): - data = data.encode(encoding='ascii') + data = data.encode(encoding="ascii") self.port.write(data) def read(self) -> Tuple[bytes, bytes]: msg = None, None while msg[0] is None or (msg[0] not in self.topics and not self._accept_all): - while b'\0' not in self.buffer: + while b"\0" not in self.buffer: self.buffer.extend(self.port.read(1)) self.buffer.extend(self.port.read(-1)) - assert b'\0' in self.buffer - msg, self.buffer = self.buffer.split(b'\0', 1) + assert b"\0" in self.buffer + msg, self.buffer = self.buffer.split(b"\0", 1) try: msg = cobs.decode(msg) except cobs.DecodeError: - logger(__name__).warning(f'Could not decode bytes: {msg.hex()}') + logger(__name__).warning(f"Could not decode bytes: {msg.hex()}") assert len(msg) >= 4 msg = bytes(msg[:4]), bytes(msg[4:]) return msg diff --git a/pros/serial/devices/vex/vex_device.py b/pros/serial/devices/vex/vex_device.py index d234dd78..261415e6 100644 --- a/pros/serial/devices/vex/vex_device.py +++ b/pros/serial/devices/vex/vex_device.py @@ -29,7 +29,7 @@ def query_system(self) -> bytearray: Verify that a VEX device is connected. 
Returned payload varies by product :return: Payload response """ - logger(__name__).debug('Sending simple 0x21 command') + logger(__name__).debug("Sending simple 0x21 command") return self._txrx_simple_packet(0x21, 0x0A) def _txrx_simple_struct(self, command: int, unpack_fmt: str, timeout: Optional[float] = None) -> Tuple: @@ -46,11 +46,11 @@ def _txrx_simple_packet(self, command: int, rx_len: int, timeout: Optional[float :return: They payload of the message, or raises and exception if there was an issue """ msg = self._txrx_packet(command, timeout=timeout) - if msg['command'] != command: - raise comm_error.VEXCommError('Received command does not match sent command.', msg) - if len(msg['payload']) != rx_len: + if msg["command"] != command: + raise comm_error.VEXCommError("Received command does not match sent command.", msg) + if len(msg["payload"]) != rx_len: raise comm_error.VEXCommError("Received data doesn't match expected length", msg) - return msg['payload'] + return msg["payload"] def _rx_packet(self, timeout: Optional[float] = None) -> Dict[str, Union[Union[int, bytes, bytearray], Any]]: # Optimized to read as quickly as possible w/o delay @@ -82,18 +82,18 @@ def _rx_packet(self, timeout: Optional[float] = None) -> Dict[str, Union[Union[i rx.extend(self.port.read(1)) payload_length = rx[-1] if command == 0x56 and (payload_length & 0x80) == 0x80: - logger(__name__).debug('Found an extended message payload') + logger(__name__).debug("Found an extended message payload") rx.extend(self.port.read(1)) payload_length = ((payload_length & 0x7F) << 8) + rx[-1] payload = self.port.read(payload_length) rx.extend(payload) - return {'command': command, 'payload': payload, 'raw': rx} + return {"command": command, "payload": payload, "raw": rx} def _tx_packet(self, command: int, tx_data: Union[Iterable, bytes, bytearray, None] = None): tx = self._form_simple_packet(command) if tx_data is not None: tx = bytes([*tx, *tx_data]) - logger(__name__).debug(f'{self.__class__.__name__} TX: {bytes_to_str(tx)}') + logger(__name__).debug(f"{self.__class__.__name__} TX: {bytes_to_str(tx)}") self.port.read_all() self.port.write(tx) self.port.flush() @@ -113,10 +113,10 @@ def _txrx_packet( """ tx = self._tx_packet(command, tx_data) rx = self._rx_packet(timeout=timeout) - msg = Message(rx['raw'], tx) + msg = Message(rx["raw"], tx) logger(__name__).debug(msg) - msg['payload'] = Message(rx['raw'], tx, internal_rx=rx['payload']) - msg['command'] = rx['command'] + msg["payload"] = Message(rx["raw"], tx, internal_rx=rx["payload"]) + msg["command"] = rx["command"] return msg @staticmethod diff --git a/pros/serial/interactive/UploadProjectModal.py b/pros/serial/interactive/UploadProjectModal.py index 4336861b..7b0b7702 100644 --- a/pros/serial/interactive/UploadProjectModal.py +++ b/pros/serial/interactive/UploadProjectModal.py @@ -14,14 +14,14 @@ class UploadProjectModal(application.Modal[None]): def __init__(self, project: Optional[Project]): - super(UploadProjectModal, self).__init__('Upload Project', confirm_button='Upload') + super(UploadProjectModal, self).__init__("Upload Project", confirm_button="Upload") self.project: Optional[Project] = project self.project_path = ExistingProjectParameter( - str(project.location) if project else os.path.join(os.path.expanduser('~'), 'My PROS Project') + str(project.location) if project else os.path.join(os.path.expanduser("~"), "My PROS Project") ) - self.port = parameters.OptionParameter('', ['']) + self.port = parameters.OptionParameter("", [""]) self.save_settings = 
parameters.BooleanParameter(True) self.advanced_options: Dict[str, parameters.Parameter] = {} self.advanced_options_collapsed = parameters.BooleanParameter(True) @@ -40,51 +40,51 @@ def cleanup_poll_comports_thread(): cb(self.project_path) def update_slots(self): - assert self.project.target == 'v5' + assert self.project.target == "v5" if self.port.is_valid() and bool(self.port.value): from pros.serial.devices.vex import V5Device from pros.serial.ports import DirectPort device = V5Device(DirectPort(self.port.value)) slot_options = [ - f'{slot}' + ('' if program is None else f' (Currently: {program})') + f"{slot}" + ("" if program is None else f" (Currently: {program})") for slot, program in device.used_slots().items() ] else: slot_options = [str(i) for i in range(1, 9)] - project_name = self.advanced_options['name'].value - if 'slot' in self.project.upload_options: + project_name = self.advanced_options["name"].value + if "slot" in self.project.upload_options: # first, see if the project has it specified in its upload options - selected = slot_options[self.project.upload_options['slot'] - 1] + selected = slot_options[self.project.upload_options["slot"] - 1] else: # otherwise, try to do a name match - matched_slots = [i for i, slot in enumerate(slot_options) if slot.endswith(f'{project_name})')] + matched_slots = [i for i, slot in enumerate(slot_options) if slot.endswith(f"{project_name})")] if len(matched_slots) > 0: selected = slot_options[matched_slots[0]] - elif 'slot' in self.advanced_options: + elif "slot" in self.advanced_options: # or whatever the last value was - selected = slot_options[int(self.advanced_options['slot'].value[0]) - 1] + selected = slot_options[int(self.advanced_options["slot"].value[0]) - 1] else: # or just slot 1 selected = slot_options[0] - self.advanced_options['slot'] = parameters.OptionParameter(selected, slot_options) + self.advanced_options["slot"] = parameters.OptionParameter(selected, slot_options) def update_comports(self): list_all_comports.cache_clear() if isinstance(self.project, Project): options = {} - if self.project.target == 'v5': - options = {p.device for p in find_v5_ports('system')} - elif self.project.target == 'cortex': + if self.project.target == "v5": + options = {p.device for p in find_v5_ports("system")} + elif self.project.target == "cortex": options = [p.device for p in find_cortex_ports()] if options != {*self.port.options}: self.port.options = list(options) if self.port.value not in options: - self.port.update(self.port.options[0] if len(self.port.options) > 0 else 'No ports found') - ui.logger(__name__).debug('Updating ports') + self.port.update(self.port.options[0] if len(self.port.options) > 0 else "No ports found") + ui.logger(__name__).debug("Updating ports") - if self.project and self.project.target == 'v5': + if self.project and self.project.target == "v5": self.update_slots() self.redraw() @@ -100,13 +100,13 @@ def project_changed(self, new_project: ExistingProjectParameter): assert self.project is not None - if self.project.target == 'v5': + if self.project.target == "v5": self.advanced_options = { - 'name': parameters.Parameter(self.project.upload_options.get('remote_name', self.project.name)), - 'description': parameters.Parameter( - self.project.upload_options.get('description', 'Created with PROS') + "name": parameters.Parameter(self.project.upload_options.get("remote_name", self.project.name)), + "description": parameters.Parameter( + self.project.upload_options.get("description", "Created with PROS") ), - 
'compress_bin': parameters.BooleanParameter(self.project.upload_options.get('compress_bin', True)), + "compress_bin": parameters.BooleanParameter(self.project.upload_options.get("compress_bin", True)), } self.update_slots() else: @@ -123,14 +123,14 @@ def confirm(self, *args, **kwargs): from pros.cli.upload import upload - kwargs = {'path': None, 'project': self.project, 'port': self.port.value} + kwargs = {"path": None, "project": self.project, "port": self.port.value} savable_kwargs = {} - if self.project.target == 'v5': - savable_kwargs['remote_name'] = self.advanced_options['name'].value + if self.project.target == "v5": + savable_kwargs["remote_name"] = self.advanced_options["name"].value # XXX: the first character is the slot number - savable_kwargs['slot'] = int(self.advanced_options['slot'].value[0]) - savable_kwargs['description'] = self.advanced_options['description'].value - savable_kwargs['compress_bin'] = self.advanced_options['compress_bin'].value + savable_kwargs["slot"] = int(self.advanced_options["slot"].value[0]) + savable_kwargs["description"] = self.advanced_options["description"].value + savable_kwargs["compress_bin"] = self.advanced_options["compress_bin"].value if self.save_settings.value: self.project.upload_options.update(savable_kwargs) @@ -152,16 +152,16 @@ def build(self) -> Generator[components.Component, None, None]: self.poll_comports_thread = Thread(target=with_click_context(self.poll_comports)) self.poll_comports_thread.start() - yield components.DirectorySelector('Project Directory', self.project_path) - yield components.DropDownBox('Port', self.port) - yield components.Checkbox('Save upload settings', self.save_settings) + yield components.DirectorySelector("Project Directory", self.project_path) + yield components.DropDownBox("Port", self.port) + yield components.Checkbox("Save upload settings", self.save_settings) - if isinstance(self.project, Project) and self.project.target == 'v5': + if isinstance(self.project, Project) and self.project.target == "v5": yield components.Container( - components.InputBox('Program Name', self.advanced_options['name']), - components.DropDownBox('Slot', self.advanced_options['slot']), - components.InputBox('Description', self.advanced_options['description']), - components.Checkbox('Compress Binary', self.advanced_options['compress_bin']), - title='Advanced V5 Options', + components.InputBox("Program Name", self.advanced_options["name"]), + components.DropDownBox("Slot", self.advanced_options["slot"]), + components.InputBox("Description", self.advanced_options["description"]), + components.Checkbox("Compress Binary", self.advanced_options["compress_bin"]), + title="Advanced V5 Options", collapsed=self.advanced_options_collapsed, ) diff --git a/pros/serial/interactive/__init__.py b/pros/serial/interactive/__init__.py index aa7f4062..ec961c20 100644 --- a/pros/serial/interactive/__init__.py +++ b/pros/serial/interactive/__init__.py @@ -1,3 +1,3 @@ from .UploadProjectModal import UploadProjectModal -__all__ = ['UploadProjectModal'] +__all__ = ["UploadProjectModal"] diff --git a/pros/serial/ports/__init__.py b/pros/serial/ports/__init__.py index e1d15175..a880d536 100644 --- a/pros/serial/ports/__init__.py +++ b/pros/serial/ports/__init__.py @@ -13,5 +13,5 @@ @lru_cache() def list_all_comports(): ports = list_ports.comports() - logger(__name__).debug('Connected: {}'.format(';'.join([str(p.__dict__) for p in ports]))) + logger(__name__).debug("Connected: {}".format(";".join([str(p.__dict__) for p in ports]))) return ports diff 
--git a/pros/serial/ports/direct_port.py b/pros/serial/ports/direct_port.py index 0fb20ad3..d18d36e2 100644 --- a/pros/serial/ports/direct_port.py +++ b/pros/serial/ports/direct_port.py @@ -11,7 +11,7 @@ def create_serial_port(port_name: str, timeout: Optional[float] = 1.0) -> serial.Serial: try: - logger(__name__).debug(f'Opening serial port {port_name}') + logger(__name__).debug(f"Opening serial port {port_name}") port = serial.Serial( port_name, baudrate=115200, @@ -23,7 +23,7 @@ def create_serial_port(port_name: str, timeout: Optional[float] = 1.0) -> serial port.inter_byte_timeout = 0.2 return port except serial.SerialException as e: - if any(msg in str(e) for msg in ['Access is denied', 'Errno 16', 'Errno 13']): + if any(msg in str(e) for msg in ["Access is denied", "Errno 16", "Errno 13"]): tb = sys.exc_info()[2] raise dont_send(ConnectionRefusedException(port_name, e).with_traceback(tb)) else: @@ -33,7 +33,7 @@ def create_serial_port(port_name: str, timeout: Optional[float] = 1.0) -> serial class DirectPort(BasePort): def __init__(self, port_name: str, **kwargs): - self.serial: serial.Serial = create_serial_port(port_name=port_name, timeout=kwargs.pop('timeout', 1.0)) + self.serial: serial.Serial = create_serial_port(port_name=port_name, timeout=kwargs.pop("timeout", 1.0)) self.buffer: bytearray = bytearray() def read(self, n_bytes: int = 0) -> bytes: @@ -58,14 +58,14 @@ def read(self, n_bytes: int = 0) -> bytes: def write(self, data: Union[str, bytes]): if isinstance(data, str): - data = data.encode(encoding='ascii') + data = data.encode(encoding="ascii") self.serial.write(data) def flush(self): self.serial.flush() def destroy(self): - logger(__name__).debug(f'Destroying {self.__class__.__name__} to {self.serial.name}') + logger(__name__).debug(f"Destroying {self.__class__.__name__} to {self.serial.name}") self.serial.close() @property diff --git a/pros/serial/ports/exceptions.py b/pros/serial/ports/exceptions.py index 71843fee..44e63f30 100644 --- a/pros/serial/ports/exceptions.py +++ b/pros/serial/ports/exceptions.py @@ -9,9 +9,9 @@ def __init__(self, port_name: str, reason: Exception): self.port_name = port_name def __str__(self): - extra = '' - if os.name == 'posix': - extra = 'adding yourself to dialout group ' + extra = "" + if os.name == "posix": + extra = "adding yourself to dialout group " return ( f"could not open port '{self.port_name}'. Try closing any other VEX IDEs such as VEXCode, Robot Mesh Studio, or " f"firmware utilities; moving to a different USB port; {extra}or " @@ -25,9 +25,9 @@ def __init__(self, port_name: str, reason: Exception): self.port_name = port_name def __str__(self): - extra = '' - if os.name == 'posix': - extra = 'adding yourself to dialout group ' + extra = "" + if os.name == "posix": + extra = "adding yourself to dialout group " return ( f"Port not found: Could not open port '{self.port_name}'. 
Try closing any other VEX IDEs such as VEXCode, Robot Mesh Studio, or " f"firmware utilities; moving to a different USB port; {extra}or " diff --git a/pros/serial/ports/serial_share_bridge.py b/pros/serial/ports/serial_share_bridge.py index cc35cd6a..b06827c6 100644 --- a/pros/serial/ports/serial_share_bridge.py +++ b/pros/serial/ports/serial_share_bridge.py @@ -13,22 +13,22 @@ def get_port_num(serial_port_name: str, hash: str) -> int: - return sum("Powered by PROS: {}-{}".format(serial_port_name, hash).encode(encoding='ascii')) + return sum("Powered by PROS: {}-{}".format(serial_port_name, hash).encode(encoding="ascii")) def get_from_device_port_num(serial_port_name: str) -> int: - return get_port_num(serial_port_name, 'from') + return get_port_num(serial_port_name, "from") def get_to_device_port_num(serial_port_name: str) -> int: - return get_port_num(serial_port_name, 'to') + return get_port_num(serial_port_name, "to") class SerialShareBridge(object): def __init__( self, serial_port_name: str, - base_addr: str = '127.0.0.1', + base_addr: str = "127.0.0.1", to_device_port_num: int = None, from_device_port_num: int = None, ): @@ -56,16 +56,16 @@ def from_device_port_num(self): def start(self): # this function is still in the parent process - mp_ctx = multiprocessing.get_context('spawn') + mp_ctx = multiprocessing.get_context("spawn") barrier = multiprocessing.Barrier(3) - task = mp_ctx.Process(target=self._start, name='Serial Share Bridge', args=(barrier,)) + task = mp_ctx.Process(target=self._start, name="Serial Share Bridge", args=(barrier,)) task.daemon = False task.start() barrier.wait(1) return task def kill(self, do_join: bool = False): - logger(__name__).info('Killing serial share server due to watchdog') + logger(__name__).info("Killing serial share server due to watchdog") self.dying.set() self.port.destroy() if not self.zmq_ctx.closed: @@ -78,14 +78,14 @@ def kill(self, do_join: bool = False): def _start(self, initialization_barrier: multiprocessing.Barrier): try: - log_dir = os.path.join(get_pros_dir(), 'logs') + log_dir = os.path.join(get_pros_dir(), "logs") os.makedirs(log_dir, exist_ok=True) pros_logger = logging.getLogger(pros.__name__) pros_logger.setLevel(logging.DEBUG) - log_file_name = os.path.join(get_pros_dir(), 'logs', 'serial-share-bridge.log') + log_file_name = os.path.join(get_pros_dir(), "logs", "serial-share-bridge.log") handler = logging.handlers.TimedRotatingFileHandler(log_file_name, backupCount=1) handler.setLevel(logging.DEBUG) - fmt_str = '%(name)s.%(funcName)s:%(levelname)s - %(asctime)s - %(message)s (%(process)d) ({})'.format( + fmt_str = "%(name)s.%(funcName)s:%(levelname)s - %(asctime)s - %(message)s (%(process)d) ({})".format( self._serial_port_name ) handler.setFormatter(logging.Formatter(fmt_str)) @@ -95,10 +95,10 @@ def _start(self, initialization_barrier: multiprocessing.Barrier): # timeout is none, so blocks indefinitely. 
Helps reduce CPU usage when there's nothing being recv self.port = DirectPort(self._serial_port_name, timeout=None) self.from_device_thread = threading.Thread( - target=self._from_device_loop, name='From Device Reader', daemon=False, args=(initialization_barrier,) + target=self._from_device_loop, name="From Device Reader", daemon=False, args=(initialization_barrier,) ) self.to_device_thread = threading.Thread( - target=self._to_device_loop, name='To Device Reader', daemon=False, args=(initialization_barrier,) + target=self._to_device_loop, name="To Device Reader", daemon=False, args=(initialization_barrier,) ) self.dying = threading.Event() # type: threading.Event self.from_device_thread.start() @@ -108,7 +108,7 @@ def _start(self, initialization_barrier: multiprocessing.Barrier): pass logger(__name__).info( - 'Main serial share bridge thread is dying. Everything else should be dead: {}'.format( + "Main serial share bridge thread is dying. Everything else should be dead: {}".format( threading.active_count() - 1 ) ) @@ -122,9 +122,9 @@ def _from_device_loop(self, initialization_barrier: multiprocessing.Barrier): rxd = 0 try: from_ser_sock = self.zmq_ctx.socket(zmq.PUB) - addr = 'tcp://{}:{}'.format(self._base_addr, self._from_port_num) + addr = "tcp://{}:{}".format(self._base_addr, self._from_port_num) from_ser_sock.bind(addr) - logger(__name__).info('Bound from device broadcaster as a publisher to {}'.format(addr)) + logger(__name__).info("Bound from device broadcaster as a publisher to {}".format(addr)) initialization_barrier.wait() buffer = bytearray() while not self.dying.is_set(): @@ -133,28 +133,28 @@ def _from_device_loop(self, initialization_barrier: multiprocessing.Barrier): # then read everything available buffer.extend(self.port.read(1)) buffer.extend(self.port.read(-1)) - while b'\0' in buffer and not self.dying.is_set(): - msg, buffer = buffer.split(b'\0', 1) + while b"\0" in buffer and not self.dying.is_set(): + msg, buffer = buffer.split(b"\0", 1) msg = cobs.decode(msg) from_ser_sock.send_multipart((msg[:4], msg[4:])) rxd += 1 time.sleep(0) except Exception as e: # TODO: when getting a COBS decode error, rebroadcast the bytes on sout - logger(__name__).error('Unexpected error handling {}'.format(bytes_to_str(msg[:-1]))) + logger(__name__).error("Unexpected error handling {}".format(bytes_to_str(msg[:-1]))) logger(__name__).exception(e) errors += 1 logger(__name__).info( - 'Current from device broadcasting error rate: {} errors. {} successful. {}%'.format( + "Current from device broadcasting error rate: {} errors. {} successful. {}%".format( errors, rxd, errors / (errors + rxd) ) ) except Exception as e: initialization_barrier.abort() logger(__name__).exception(e) - logger(__name__).warning('From Device Broadcaster is dying now.') + logger(__name__).warning("From Device Broadcaster is dying now.") logger(__name__).info( - 'Current from device broadcasting error rate: {} errors. {} successful. {}%'.format( + "Current from device broadcasting error rate: {} errors. {} successful. 
{}%".format( errors, rxd, errors / (errors + rxd) ) ) @@ -166,10 +166,10 @@ def _from_device_loop(self, initialization_barrier: multiprocessing.Barrier): def _to_device_loop(self, initialization_barrier: multiprocessing.Barrier): try: to_ser_sock = self.zmq_ctx.socket(zmq.SUB) - addr = 'tcp://{}:{}'.format(self._base_addr, self._to_port_num) + addr = "tcp://{}:{}".format(self._base_addr, self._to_port_num) to_ser_sock.bind(addr) - to_ser_sock.setsockopt(zmq.SUBSCRIBE, b'') - logger(__name__).info('Bound to device broadcaster as a subscriber to {}'.format(addr)) + to_ser_sock.setsockopt(zmq.SUBSCRIBE, b"") + logger(__name__).info("Bound to device broadcaster as a subscriber to {}".format(addr)) watchdog = threading.Timer(10, self.kill) initialization_barrier.wait() watchdog.start() @@ -177,18 +177,18 @@ def _to_device_loop(self, initialization_barrier: multiprocessing.Barrier): msg = to_ser_sock.recv_multipart() if not msg or self.dying.is_set(): continue - if msg[0] == b'kick': - logger(__name__).debug('Kicking watchdog on server {}'.format(threading.current_thread())) + if msg[0] == b"kick": + logger(__name__).debug("Kicking watchdog on server {}".format(threading.current_thread())) watchdog.cancel() watchdog = threading.Timer(msg[1][1] if len(msg) > 1 and len(msg[1]) > 0 else 5, self.kill) watchdog.start() - elif msg[0] == b'send': - logger(self).debug('Writing {} to {}'.format(bytes_to_str(msg[1]), self.port.port_name)) + elif msg[0] == b"send": + logger(self).debug("Writing {} to {}".format(bytes_to_str(msg[1]), self.port.port_name)) self.port.write(msg[1]) except Exception as e: initialization_barrier.abort() logger(__name__).exception(e) - logger(__name__).warning('To Device Broadcaster is dying now.') + logger(__name__).warning("To Device Broadcaster is dying now.") try: self.kill(do_join=False) except: diff --git a/pros/serial/ports/serial_share_port.py b/pros/serial/ports/serial_share_port.py index 5f5691de..1a9df09a 100644 --- a/pros/serial/ports/serial_share_port.py +++ b/pros/serial/ports/serial_share_port.py @@ -6,8 +6,8 @@ class SerialSharePort(BasePort): def __init__( self, port_name: str, - topic: bytes = b'sout', - addr: str = '127.0.0.1', + topic: bytes = b"sout", + addr: str = "127.0.0.1", to_device_port: int = None, from_device_port: int = None, ): @@ -29,20 +29,20 @@ def __init__( self.from_device_sock = self.ctx.socket(zmq.SUB) # type: zmq.Socket self.from_device_sock.setsockopt(zmq.SUBSCRIBE, self.topic) - self.from_device_sock.setsockopt(zmq.SUBSCRIBE, b'kdbg') - self.from_device_sock.connect('tcp://{}:{}'.format(self._base_addr, self._from_port_num)) + self.from_device_sock.setsockopt(zmq.SUBSCRIBE, b"kdbg") + self.from_device_sock.connect("tcp://{}:{}".format(self._base_addr, self._from_port_num)) logger(__name__).info( - 'Connected from device as a subscriber on tcp://{}:{}'.format(self._base_addr, self._from_port_num) + "Connected from device as a subscriber on tcp://{}:{}".format(self._base_addr, self._from_port_num) ) self.to_device_sock = self.ctx.socket(zmq.PUB) # type: zmq.Socket - self.to_device_sock.connect('tcp://{}:{}'.format(self._base_addr, self._to_port_num)) + self.to_device_sock.connect("tcp://{}:{}".format(self._base_addr, self._to_port_num)) logger(__name__).info( - 'Connected to device as a publisher on tcp://{}:{}'.format(self._base_addr, self._to_port_num) + "Connected to device as a publisher on tcp://{}:{}".format(self._base_addr, self._to_port_num) ) self.alive = threading.Event() - self.watchdog_thread = 
threading.Thread(target=self._kick_watchdog, name='Client Kicker') + self.watchdog_thread = threading.Thread(target=self._kick_watchdog, name="Client Kicker") self.watchdog_thread.start() def read(self, n_bytes: int = -1): @@ -58,22 +58,22 @@ def read_packet(self): def write(self, data: AnyStr): if isinstance(data, str): - data = data.encode(encoding='ascii') + data = data.encode(encoding="ascii") assert isinstance(data, bytes) - self.to_device_sock.send_multipart([b'send', data]) + self.to_device_sock.send_multipart([b"send", data]) def subscribe(self, topic: bytes): assert len(topic) == 4 - self.write(bytearray([*b'pRe', *topic])) + self.write(bytearray([*b"pRe", *topic])) self.from_device_sock.subscribe(topic=topic) def unsubscribe(self, topic: bytes): assert len(topic) == 4 - self.write(bytearray([*b'pRd', *topic])) + self.write(bytearray([*b"pRd", *topic])) self.from_device_sock.unsubscribe(topic=topic) def destroy(self): - logger(__name__).info('Destroying {}'.format(self)) + logger(__name__).info("Destroying {}".format(self)) self.alive.set() if self.watchdog_thread.is_alive(): self.watchdog_thread.join() @@ -85,7 +85,7 @@ def destroy(self): def _kick_watchdog(self): time.sleep(0.5) while not self.alive.is_set(): - logger(__name__).debug('Kicking server from {}'.format(threading.current_thread())) - self.to_device_sock.send_multipart([b'kick']) + logger(__name__).debug("Kicking server from {}".format(threading.current_thread())) + self.to_device_sock.send_multipart([b"kick"]) self.alive.wait(2.5) - logger(__name__).info('Watchdog kicker is dying') + logger(__name__).info("Watchdog kicker is dying") diff --git a/pros/serial/terminal/terminal.py b/pros/serial/terminal/terminal.py index 05710568..4bd805f7 100644 --- a/pros/serial/terminal/terminal.py +++ b/pros/serial/terminal/terminal.py @@ -59,7 +59,7 @@ def __exit__(self, *args, **kwargs): self.setup() -if os.name == 'nt': # noqa +if os.name == "nt": # noqa import ctypes import msvcrt @@ -112,7 +112,7 @@ def cancel(self): hwnd = ctypes.windll.kernel32.GetConsoleWindow() ctypes.windll.user32.PostMessageA(hwnd, 0x100, 0x0D, 0) -elif os.name == 'posix': +elif os.name == "posix": import atexit import select import termios @@ -155,7 +155,7 @@ def cleanup(self): termios.tcsetattr(self.fd, termios.TCSAFLUSH, self.old) else: - raise NotImplementedError('Sorry no implementation for your platform ({})' ' available.'.format(sys.platform)) + raise NotImplementedError("Sorry no implementation for your platform ({})" " available.".format(sys.platform)) class Terminal(object): @@ -165,8 +165,8 @@ def __init__( self, port_instance: StreamDevice, transformations=(), output_raw: bool = False, request_banner: bool = True ): self.device = port_instance - self.device.subscribe(b'sout') - self.device.subscribe(b'serr') + self.device.subscribe(b"sout") + self.device.subscribe(b"serr") self.transformations = transformations self._reader_alive = None self.receiver_thread = None # type: threading.Thread @@ -182,7 +182,7 @@ def __init__( def _start_rx(self): self._reader_alive = True - self.receiver_thread = threading.Thread(target=self.reader, name='serial-rx-term') + self.receiver_thread = threading.Thread(target=self.reader, name="serial-rx-term") self.receiver_thread.daemon = True self.receiver_thread.start() @@ -192,7 +192,7 @@ def _stop_rx(self): def _start_tx(self): self._transmitter_alive = True - self.transmitter_thread = threading.Thread(target=self.transmitter, name='serial-tx-term') + self.transmitter_thread = 
threading.Thread(target=self.transmitter, name="serial-tx-term")
         self.transmitter_thread.daemon = True
         self.transmitter_thread.start()
@@ -204,7 +204,7 @@ def _stop_tx(self):
     def reader(self):
         if self.request_banner:
             try:
-                self.device.write(b'pRb')
+                self.device.write(b"pRb")
             except Exception as e:
                 logger(__name__).exception(e)
         try:
@@ -212,25 +212,25 @@ def reader(self):
                 data = self.device.read()
                 if not data:
                     continue
-                if data[0] == b'sout':
+                if data[0] == b"sout":
                     text = decode_bytes_to_str(data[1])
-                elif data[0] == b'serr':
-                    text = '{}{}{}'.format(colorama.Fore.RED, decode_bytes_to_str(data[1]), colorama.Style.RESET_ALL)
-                elif data[0] == b'kdbg':
-                    text = '{}\n\nKERNEL DEBUG:\t{}{}\n'.format(
+                elif data[0] == b"serr":
+                    text = "{}{}{}".format(colorama.Fore.RED, decode_bytes_to_str(data[1]), colorama.Style.RESET_ALL)
+                elif data[0] == b"kdbg":
+                    text = "{}\n\nKERNEL DEBUG:\t{}{}\n".format(
                         colorama.Back.GREEN + colorama.Style.BRIGHT,
                         decode_bytes_to_str(data[1]),
                         colorama.Style.RESET_ALL,
                     )
-                elif data[0] != b'':
-                    text = '{}{}'.format(decode_bytes_to_str(data[0]), decode_bytes_to_str(data[1]))
+                elif data[0] != b"":
+                    text = "{}{}".format(decode_bytes_to_str(data[0]), decode_bytes_to_str(data[1]))
                 else:
                     text = "{}".format(decode_bytes_to_str(data[1]))
                 self.console.write(text)
         except UnicodeError as e:
             logger(__name__).exception(e)
         except PortConnectionException:
-            logger(__name__).warning(f'Connection to {self.device.name} broken')
+            logger(__name__).warning(f"Connection to {self.device.name} broken")
             if not self.alive.is_set():
                 self.stop()
         except Exception as e:
@@ -239,7 +239,7 @@ def reader(self):
             else:
                 logger(__name__).debug(e)
             self.stop()
-        logger(__name__).info('Terminal receiver dying')
+        logger(__name__).info("Terminal receiver dying")
 
     def transmitter(self):
         try:
@@ -247,14 +247,14 @@ def transmitter(self):
                 try:
                     c = self.console.getkey()
                 except KeyboardInterrupt:
-                    c = '\x03'
+                    c = "\x03"
                 if self.alive.is_set():
                     break
 
-                if c == '\x03' or not self.no_sigint:
+                if c == "\x03" or not self.no_sigint:
                     self.stop()
                     break
                 else:
-                    self.device.write(c.encode(encoding='utf-8'))
+                    self.device.write(c.encode(encoding="utf-8"))
                     self.console.write(c)
         except Exception as e:
@@ -262,7 +262,7 @@ def transmitter(self):
             else:
                 logger(__name__).debug(e)
             self.stop()
-        logger(__name__).info('Terminal transmitter dying')
+        logger(__name__).info("Terminal transmitter dying")
 
     def catch_sigint(self):
         self.no_sigint = False
@@ -277,13 +277,13 @@ def start(self):
     def stop(self, *args):
         self.console.cleanup()
         if not self.alive.is_set():
-            logger(__name__).warning('Stopping terminal')
+            logger(__name__).warning("Stopping terminal")
             self.alive.set()
             self.device.destroy()
             if threading.current_thread() != self.transmitter_thread and self.transmitter_thread.is_alive():
                 self.console.cleanup()
                 self.console.cancel()
-        logger(__name__).info('All done!')
+        logger(__name__).info("All done!")
 
     def join(self):
         try:
diff --git a/pros/upgrade/__init__.py b/pros/upgrade/__init__.py
index 9794ad32..4c546227 100644
--- a/pros/upgrade/__init__.py
+++ b/pros/upgrade/__init__.py
@@ -5,4 +5,4 @@ def get_platformv2():
     return UpgradeManifestV2().platform
 
 
-__all__ = ['UpgradeManager', 'get_platformv2']
+__all__ = ["UpgradeManager", "get_platformv2"]
diff --git a/pros/upgrade/instructions/__init__.py b/pros/upgrade/instructions/__init__.py
index 003b586a..452e2915 100644
--- a/pros/upgrade/instructions/__init__.py
+++ b/pros/upgrade/instructions/__init__.py
@@ -3,4 +3,4 @@
 from .explorer_instructions import ExplorerInstruction
 from .nothing_instructions import NothingInstruction
 
-__all__ = ['UpgradeInstruction', 'UpgradeResult', 'NothingInstruction', 'ExplorerInstruction', 'DownloadInstruction']
+__all__ = ["UpgradeInstruction", "UpgradeResult", "NothingInstruction", "ExplorerInstruction", "DownloadInstruction"]
diff --git a/pros/upgrade/instructions/download_instructions.py b/pros/upgrade/instructions/download_instructions.py
index 666456c6..7a428c8c 100644
--- a/pros/upgrade/instructions/download_instructions.py
+++ b/pros/upgrade/instructions/download_instructions.py
@@ -11,7 +11,7 @@ class DownloadInstruction(UpgradeInstruction):
     Downloads a file
     """
 
-    def __init__(self, url='', extension=None, download_description=None, success_explanation=None):
+    def __init__(self, url="", extension=None, download_description=None, success_explanation=None):
         self.url: str = url
         self.extension: Optional[str] = extension
         self.download_description: Optional[str] = download_description
@@ -23,15 +23,15 @@ def perform_upgrade(self) -> UpgradeResult:
             file = download_file(self.url, ext=self.extension, desc=self.download_description)
             assert file
         except (AssertionError, IOError) as e:
-            return UpgradeResult(False, explanation=f'Failed to download required file. ({e})', exception=e)
+            return UpgradeResult(False, explanation=f"Failed to download required file. ({e})", exception=e)
 
         if self.success_explanation:
-            explanation = self.success_explanation.replace('//FILE\\\\', file).replace(
-                '//SHORT\\\\', os.path.split(file)[1]
+            explanation = self.success_explanation.replace("//FILE\\\\", file).replace(
+                "//SHORT\\\\", os.path.split(file)[1]
             )
         else:
-            explanation = f'Downloaded {os.path.split(file)[1]}'
+            explanation = f"Downloaded {os.path.split(file)[1]}"
         return UpgradeResult(True, explanation=explanation, file=file, origin=self.url)
 
     def __str__(self) -> str:
-        return 'Download required file.'
+        return "Download required file."
diff --git a/pros/upgrade/instructions/explorer_instructions.py b/pros/upgrade/instructions/explorer_instructions.py
index d54666f1..c54748b9 100644
--- a/pros/upgrade/instructions/explorer_instructions.py
+++ b/pros/upgrade/instructions/explorer_instructions.py
@@ -12,8 +12,8 @@ def perform_upgrade(self) -> UpgradeResult:
         if result.successful:
             import click
 
-            click.launch(getattr(result, 'file'))
+            click.launch(getattr(result, "file"))
         return result
 
     def __str__(self) -> str:
-        return 'Download required file.'
+        return "Download required file."
diff --git a/pros/upgrade/instructions/nothing_instructions.py b/pros/upgrade/instructions/nothing_instructions.py
index a3619173..1c11df8c 100644
--- a/pros/upgrade/instructions/nothing_instructions.py
+++ b/pros/upgrade/instructions/nothing_instructions.py
@@ -3,7 +3,7 @@
 
 class NothingInstruction(UpgradeInstruction):
     def __str__(self) -> str:
-        return 'No automated instructions. View release notes for installation instructions.'
+        return "No automated instructions. View release notes for installation instructions."
 
     def perform_upgrade(self) -> UpgradeResult:
         return UpgradeResult(True)
diff --git a/pros/upgrade/manifests/__init__.py b/pros/upgrade/manifests/__init__.py
index cc8fb43d..4e58eb16 100644
--- a/pros/upgrade/manifests/__init__.py
+++ b/pros/upgrade/manifests/__init__.py
@@ -5,4 +5,4 @@
 # Order of files
 manifests = [UpgradeManifestV2, UpgradeManifestV1]  # type: List[Type]
 
-__all__ = ['UpgradeManifestV1', 'UpgradeManifestV2', 'manifests', 'PlatformsV2']
+__all__ = ["UpgradeManifestV1", "UpgradeManifestV2", "manifests", "PlatformsV2"]
diff --git a/pros/upgrade/manifests/upgrade_manifest_v1.py b/pros/upgrade/manifests/upgrade_manifest_v1.py
index 33714ef0..f0187d4a 100644
--- a/pros/upgrade/manifests/upgrade_manifest_v1.py
+++ b/pros/upgrade/manifests/upgrade_manifest_v1.py
@@ -28,11 +28,11 @@ def describe_update(self) -> str:
         """
         if self.needs_upgrade:
             return (
-                f'There is an update available! {self.version} is the latest version.\n'
-                f'Go to {self.info_url} to learn more.'
+                f"There is an update available! {self.version} is the latest version.\n"
+                f"Go to {self.info_url} to learn more."
             )
         else:
-            return f'You are up to date. ({self.version})'
+            return f"You are up to date. ({self.version})"
 
     def __str__(self):
         return self.describe_update()
@@ -48,4 +48,4 @@ def perform_upgrade(self) -> UpgradeResult:
         return UpgradeResult(launch(self.info_url) == 0)
 
     def describe_post_install(self, **kwargs) -> str:
-        return f'Download the latest version from {self.info_url}'
+        return f"Download the latest version from {self.info_url}"
diff --git a/pros/upgrade/manifests/upgrade_manifest_v2.py b/pros/upgrade/manifests/upgrade_manifest_v2.py
index 6d4f3581..00ee656f 100644
--- a/pros/upgrade/manifests/upgrade_manifest_v2.py
+++ b/pros/upgrade/manifests/upgrade_manifest_v2.py
@@ -27,34 +27,34 @@ def __init__(self):
         super().__init__()
         self.platform_instructions: Dict[PlatformsV2, UpgradeInstruction] = {}
 
-        self._platform: 'PlatformsV2' = None
+        self._platform: "PlatformsV2" = None
         self._last_file: Optional[str] = None
 
     @property
-    def platform(self) -> 'PlatformsV2':
+    def platform(self) -> "PlatformsV2":
         """
         Attempts to detect the current platform type
 
         :return: The detected platform type, or Unknown
         """
         if self._platform is not None:
             return self._platform
-        if getattr(sys, 'frozen', False):
+        if getattr(sys, "frozen", False):
             import _constants
 
-            frozen_platform = getattr(_constants, 'FROZEN_PLATFORM_V1', None)
+            frozen_platform = getattr(_constants, "FROZEN_PLATFORM_V1", None)
             if isinstance(frozen_platform, str):
-                if frozen_platform.startswith('Windows86'):
+                if frozen_platform.startswith("Windows86"):
                     self._platform = PlatformsV2.Windows86
-                elif frozen_platform.startswith('Windows64'):
+                elif frozen_platform.startswith("Windows64"):
                     self._platform = PlatformsV2.Windows64
-                elif frozen_platform.startswith('MacOS'):
+                elif frozen_platform.startswith("MacOS"):
                     self._platform = PlatformsV2.MacOS
         else:
             try:
                 from pip._vendor import pkg_resources
 
-                results = [p for p in pkg_resources.working_set if p.project_name.startswith('pros-cli')]
+                results = [p for p in pkg_resources.working_set if p.project_name.startswith("pros-cli")]
                 if any(results):
                     self._platform = PlatformsV2.Pip
             except ImportError:
@@ -70,9 +70,9 @@ def can_perform_upgrade(self) -> bool:
     def perform_upgrade(self) -> UpgradeResult:
         instructions: UpgradeInstruction = self.platform_instructions.get(self.platform, NothingInstruction())
         logger(__name__).debug(self.__dict__)
-        logger(__name__).debug(f'Platform: {self.platform}')
+        logger(__name__).debug(f"Platform: {self.platform}")
         logger(__name__).debug(instructions.__dict__)
         return instructions.perform_upgrade()
 
     def __repr__(self):
-        return repr({'platform': self.platform, **self.__dict__})
+        return repr({"platform": self.platform, **self.__dict__})
diff --git a/pros/upgrade/upgrade_manager.py b/pros/upgrade/upgrade_manager.py
index d0bc3baf..efd39464 100644
--- a/pros/upgrade/upgrade_manager.py
+++ b/pros/upgrade/upgrade_manager.py
@@ -13,14 +13,14 @@
 
 
 class ReleaseChannel(Enum):
-    Stable = 'stable'
-    Beta = 'beta'
+    Stable = "stable"
+    Beta = "beta"
 
 
 class UpgradeManager(Config):
     def __init__(self, file=None):
         if file is None:
-            file = os.path.join(cli_config().directory, 'upgrade.pros.json')
+            file = os.path.join(cli_config().directory, "upgrade.pros.json")
         self._last_check: datetime = datetime.min
         self._manifest: Optional[UpgradeManifestV1] = None
         self.release_channel: ReleaseChannel = ReleaseChannel.Stable
@@ -30,23 +30,23 @@ def __init__(self, file=None):
     @property
     def has_stale_manifest(self):
         if self._manifest is None:
-            logger(__name__).debug('Upgrade manager\'s manifest is nonexistent')
+            logger(__name__).debug("Upgrade manager's manifest is nonexistent")
         if datetime.now() - self._last_check > cli_config().update_frequency:
-            logger(__name__).debug(f'Upgrade manager\'s last check occured at {self._last_check}.')
-            logger(__name__).debug(f'Was longer ago than update frequency ({cli_config().update_frequency}) allows.')
+            logger(__name__).debug(f"Upgrade manager's last check occured at {self._last_check}.")
+            logger(__name__).debug(f"Was longer ago than update frequency ({cli_config().update_frequency}) allows.")
         return (self._manifest is None) or (datetime.now() - self._last_check > cli_config().update_frequency)
 
     def get_manifest(self, force: bool = False) -> UpgradeManifestV1:
         if not force and not self.has_stale_manifest:
             return self._manifest
 
-        ui.echo('Fetching upgrade manifest...')
+        ui.echo("Fetching upgrade manifest...")
         import json
 
         import jsonpickle
         import requests
 
-        channel_url = f'https://purduesigbots.github.io/pros-mainline/{self.release_channel.value}'
+        channel_url = f"https://purduesigbots.github.io/pros-mainline/{self.release_channel.value}"
         self._manifest = None
 
         manifest_urls = [f"{channel_url}/{manifest.__name__}.json" for manifest in manifests]
@@ -60,13 +60,13 @@ def get_manifest(self, force: bool = False) -> UpgradeManifestV1:
                     self.save()
                     break
                 except json.decoder.JSONDecodeError as e:
-                    logger(__name__).warning(f'Failed to decode {manifest_url}')
+                    logger(__name__).warning(f"Failed to decode {manifest_url}")
                     logger(__name__).debug(e)
             else:
-                logger(__name__).debug(f'Failed to get {manifest_url} ({resp.status_code})')
+                logger(__name__).debug(f"Failed to get {manifest_url} ({resp.status_code})")
         if not self._manifest:
             manifest_list = "\n".join(manifest_urls)
-            logger(__name__).warning(f'Could not access any upgrade manifests from any of:\n{manifest_list}')
+            logger(__name__).warning(f"Could not access any upgrade manifests from any of:\n{manifest_list}")
         return self._manifest
 
     @property
diff --git a/setup.py b/setup.py
index b4cd07b8..6785b4e2 100644
--- a/setup.py
+++ b/setup.py
@@ -5,14 +5,14 @@
 from install_requires import install_requires as install_reqs
 
 setup(
-    name='pros-cli',
-    version=open('pip_version').read().strip(),
+    name="pros-cli",
+    version=open("pip_version").read().strip(),
     packages=find_packages(),
-    url='https://github.com/purduesigbots/pros-cli',
-    license='MPL-2.0',
-    author='Purdue ACM SIGBots',
-    author_email='pros_development@cs.purdue.edu',
-    description='Command Line Interface for managing PROS projects',
+    url="https://github.com/purduesigbots/pros-cli",
+    license="MPL-2.0",
+    author="Purdue ACM SIGBots",
+    author_email="pros_development@cs.purdue.edu",
+    description="Command Line Interface for managing PROS projects",
     install_requires=install_reqs,
-    entry_points={'console_scripts': ['pros=pros.cli.main:main', 'prosv5=pros.cli.main:main']},
+    entry_points={"console_scripts": ["pros=pros.cli.main:main", "prosv5=pros.cli.main:main"]},
 )
diff --git a/version.py b/version.py
index 658e3a4a..0523dbdf 100644
--- a/version.py
+++ b/version.py
@@ -3,38 +3,38 @@
 from sys import stdout
 
 try:
-    with open(os.devnull, 'w') as devnull:
+    with open(os.devnull, "w") as devnull:
         v = (
-            subprocess.check_output(['git', 'describe', '--tags', '--dirty', '--abbrev'], stderr=stdout)
+            subprocess.check_output(["git", "describe", "--tags", "--dirty", "--abbrev"], stderr=stdout)
             .decode()
             .strip()
         )
-        if '-' in v:
-            bv = v[: v.index('-')]
-            bv = bv[: bv.rindex('.') + 1] + str(int(bv[bv.rindex('.') + 1 :]) + 1)
-            sempre = 'dirty' if v.endswith('-dirty') else 'commit'
-            pippre = 'alpha' if v.endswith('-dirty') else 'pre'
-            build = subprocess.check_output(['git', 'rev-parse', '--short', 'HEAD']).decode().strip()
+        if "-" in v:
+            bv = v[: v.index("-")]
+            bv = bv[: bv.rindex(".") + 1] + str(int(bv[bv.rindex(".") + 1 :]) + 1)
+            sempre = "dirty" if v.endswith("-dirty") else "commit"
+            pippre = "alpha" if v.endswith("-dirty") else "pre"
+            build = subprocess.check_output(["git", "rev-parse", "--short", "HEAD"]).decode().strip()
             number_since = (
-                subprocess.check_output(['git', 'rev-list', v[: v.index('-')] + '..HEAD', '--count']).decode().strip()
+                subprocess.check_output(["git", "rev-list", v[: v.index("-")] + "..HEAD", "--count"]).decode().strip()
             )
-            semver = bv + '-' + sempre + '+' + build
+            semver = bv + "-" + sempre + "+" + build
             pipver = bv + pippre + number_since
-            winver = v[: v.index('-')] + '.' + number_since
+            winver = v[: v.index("-")] + "." + number_since
         else:
             semver = v
             pipver = v
-            winver = v + '.0'
+            winver = v + ".0"
 
-    with open('version', 'w') as f:
-        print('Semantic version is ' + semver)
+    with open("version", "w") as f:
+        print("Semantic version is " + semver)
         f.write(semver)
-    with open('pip_version', 'w') as f:
-        print('PIP version is ' + pipver)
+    with open("pip_version", "w") as f:
+        print("PIP version is " + pipver)
         f.write(pipver)
-    with open('win_version', 'w') as f:
-        print('Windows version is ' + winver)
+    with open("win_version", "w") as f:
+        print("Windows version is " + winver)
         f.write(winver)
 except Exception as e:
-    print('Error calling git')
+    print("Error calling git")
     print(e)