diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml index a903ec32..3ea5f6b1 100644 --- a/.github/workflows/codeql.yml +++ b/.github/workflows/codeql.yml @@ -48,11 +48,11 @@ jobs: # If you wish to specify custom queries, you can do so here or in a config file. # By default, queries listed here will override any specified in a config file. # Prefix the list here with "+" to use these queries and those in the config file. - + # Details on CodeQL's query packs refer to : https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs # queries: security-extended,security-and-quality - + # Autobuild attempts to build any compiled languages (C/C++, C#, or Java). # If this step fails, then you should remove it and run the build manually (see below) - name: Autobuild @@ -61,7 +61,7 @@ jobs: # ℹī¸ Command-line programs to run using the OS shell. # 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun - # If the Autobuild fails above, remove it and uncomment the following three lines. + # If the Autobuild fails above, remove it and uncomment the following three lines. # modify them (or add more) to build your code if your project, please refer to the EXAMPLE below for guidance. # - run: | diff --git a/.github/workflows/lint-format.yml b/.github/workflows/lint-format.yml new file mode 100644 index 00000000..709c5f70 --- /dev/null +++ b/.github/workflows/lint-format.yml @@ -0,0 +1,54 @@ +name: Formatter + +on: + pull_request: + push: + +env: + SKIP: pylint,isort,black + +jobs: + Formatter: + runs-on: ubuntu-latest + permissions: + contents: write + steps: + - uses: actions/checkout@v4.1.1 + with: + ref: ${{ github.head_ref }} + - uses: isort/isort-action@v1.1.0 + with: + configuration: "--settings-file=.isort.cfg" + requirements-files: "requirements.txt" + - uses: psf/black@stable + with: + options: "--line-length=120" + version: "24.3.0" + - uses: stefanzweifel/git-auto-commit-action@v5.0.0 + with: + commit_message: "Format code with isort and black" + pre-commit: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4.1.1 + - uses: actions/setup-python@v5.0.0 + - uses: pre-commit/action@v3.0.0 + Pylint: + runs-on: ubuntu-latest + strategy: + fail-fast: false + matrix: + python-version: ["3.10"] + steps: + - uses: actions/checkout@v4.1.1 + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v5.0.0 + with: + python-version: ${{ matrix.python-version }} + - name: Install dependencies + run: | + python -m pip install --upgrade pip + pip install -r requirements.txt + - name: Analysing the code with pylint + run: | + pylint $(git ls-files '*.py') --rcfile=$(git ls-files '.pylintrc') diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 94663b5c..76145e6b 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -33,24 +33,24 @@ jobs: - uses: actions/checkout@v3.1.0 with: fetch-depth: 0 - + - name: Setup Python uses: actions/setup-python@v4.3.0 with: python-version: 3.9 cache: 'pip' if: matrix.os != 'macos-latest' - + - name: Setup Python MacOS run: | wget https://www.python.org/ftp/python/3.10.11/python-3.10.11-macos11.pkg sudo installer -verbose -pkg ./python-3.10.11-macos11.pkg -target / echo "/Library/Frameworks/Python.framework/Versions/3.10/bin" >> $GITHUB_PATH if: matrix.os == 'macos-latest' - + - name: Install Requirements run: python3 -m 
pip install --upgrade pip && pip3 install wheel && pip3 install -r requirements.txt && pip3 uninstall -y typing - + - name: Build Wheel run: python3 setup.py bdist_wheel if: matrix.os == 'ubuntu-latest' @@ -61,7 +61,7 @@ jobs: name: pros-cli-wheel-${{needs.update_build_number.outputs.output1}} path: dist/* if: matrix.os == 'ubuntu-latest' - + - name: Run Pyinstaller run: | python3 version.py @@ -80,7 +80,7 @@ jobs: pyinstaller --onefile pros/cli/compile_commands/intercept-cc.py --name=intercept-cc --target-arch=universal2 pyinstaller --onefile pros/cli/compile_commands/intercept-cc.py --name=intercept-c++ --target-arch=universal2 if: matrix.os == 'macos-latest' - + - name: Package Everything Up shell: bash run: | diff --git a/.isort.cfg b/.isort.cfg new file mode 100644 index 00000000..d555e8fd --- /dev/null +++ b/.isort.cfg @@ -0,0 +1,5 @@ +[settings] + +line_length = 120 + +profile = black diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 00000000..5fc5f6a4 --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,34 @@ +repos: + - repo: https://github.com/pre-commit/pre-commit-hooks + rev: v4.5.0 + hooks: + - id: mixed-line-ending + args: [--fix=lf] + - id: end-of-file-fixer + - id: check-yaml + - id: check-vcs-permalinks + - id: check-merge-conflict + - id: check-case-conflict + - id: check-ast + - id: trailing-whitespace + - id: requirements-txt-fixer + - repo: https://github.com/pycqa/isort + rev: "5.13.2" + hooks: + - id: isort + name: isort + types: [python] + args: [--settings-file=.isort.cfg] + - repo: https://github.com/psf/black + rev: 24.3.0 + hooks: + - id: black + args: ["--line-length=120"] + - repo: local + hooks: + - id: pylint + name: pylint + entry: python -m pylint + language: system + types: [python] + args: [--rcfile=.pylintrc] diff --git a/.pylintrc b/.pylintrc new file mode 100644 index 00000000..18de6cf1 --- /dev/null +++ b/.pylintrc @@ -0,0 +1,9 @@ +[MASTER] + +max-line-length = 120 +disable = C0114, C0115, C0116, R0903, C0415, R1705, R0913, W1203, R1729, E1120, E1123, C0209, R1710, W0621, C0121, + W0614, W0401, W1202, C0117, W0718, R0205, R0402, R0914, R1725, R1735, C0411, W0237, W0702, W0223, W0613, + W0108, R0912, R0911, W0511, E1136, R0902, W0611, C0412, C0103, C0301, R1732, R0915, W1514, R1718, W1510, + E0602, W1309, C0325, E1101, R1714, R0916, W0719, R1734, E1133, W1201, W0107, W3101, W0640, C0201, W1113, + W0246, W0622, W0221, E1111, R1720, W0221, R1723, E0102, W0201, E0203, E0401, W0602, W0212, W0707, R0904, + W0101, C0302, E0110, W0603, R1701, W0106, R1721, W0601, R0401, R1731, W0612 diff --git a/install_requires.py b/install_requires.py index 6aad2a80..e77dd742 100644 --- a/install_requires.py +++ b/install_requires.py @@ -1,2 +1,2 @@ -with open('requirements.txt') as reqs: +with open("requirements.txt") as reqs: install_requires = [req.strip() for req in reqs.readlines()] diff --git a/pip_version b/pip_version index e5b82034..1545d966 100644 --- a/pip_version +++ b/pip_version @@ -1 +1 @@ -3.5.0 \ No newline at end of file +3.5.0 diff --git a/pros/cli/build.py b/pros/cli/build.py index 9ed2a742..fcf512d9 100644 --- a/pros/cli/build.py +++ b/pros/cli/build.py @@ -5,8 +5,9 @@ import click import pros.conductor as c -from pros.ga.analytics import analytics from pros.cli.common import default_options, logger, project_option, pros_root, shadow_command +from pros.ga.analytics import analytics + from .upload import upload @@ -15,9 +16,9 @@ def build_cli(): pass -@build_cli.command(aliases=['build','m']) 
+@build_cli.command(aliases=["build", "m"]) @project_option() -@click.argument('build-args', nargs=-1) +@click.argument("build-args", nargs=-1) @default_options def make(project: c.Project, build_args): """ @@ -26,17 +27,17 @@ def make(project: c.Project, build_args): analytics.send("make") exit_code = project.compile(build_args) if exit_code != 0: - if sys.platform == 'win32': + if sys.platform == "win32": kernel32 = ctypes.windll.kernel32 kernel32.SetConsoleMode(kernel32.GetStdHandle(-11), 7) - logger(__name__).error(f'Failed to make project: Exit Code {exit_code}', extra={'sentry': False}) - raise click.ClickException('Failed to build') + logger(__name__).error(f"Failed to make project: Exit Code {exit_code}", extra={"sentry": False}) + raise click.ClickException("Failed to build") return exit_code -@build_cli.command('make-upload', aliases=['mu'], hidden=True) -@click.option('build_args', '--make', '-m', multiple=True, help='Send arguments to make (e.g. compile target)') +@build_cli.command("make-upload", aliases=["mu"], hidden=True) +@click.option("build_args", "--make", "-m", multiple=True, help="Send arguments to make (e.g. compile target)") @shadow_command(upload) @project_option() @click.pass_context @@ -46,41 +47,45 @@ def make_upload(ctx, project: c.Project, build_args: List[str], **upload_args): ctx.invoke(upload, project=project, **upload_args) -@build_cli.command('make-upload-terminal', aliases=['mut'], hidden=True) -@click.option('build_args', '--make', '-m', multiple=True, help='Send arguments to make (e.g. compile target)') +@build_cli.command("make-upload-terminal", aliases=["mut"], hidden=True) +@click.option("build_args", "--make", "-m", multiple=True, help="Send arguments to make (e.g. compile target)") @shadow_command(upload) @project_option() @click.pass_context def make_upload_terminal(ctx, project: c.Project, build_args, **upload_args): analytics.send("make-upload-terminal") from .terminal import terminal + ctx.invoke(make, project=project, build_args=build_args) ctx.invoke(upload, project=project, **upload_args) ctx.invoke(terminal, port=project.target, request_banner=False) -@build_cli.command('build-compile-commands', hidden=True) +@build_cli.command("build-compile-commands", hidden=True) @project_option() -@click.option('--suppress-output/--show-output', 'suppress_output', default=False, show_default=True, - help='Suppress output') -@click.option('--compile-commands', type=click.File('w'), default=None) -@click.option('--sandbox', default=False, is_flag=True) -@click.argument('build-args', nargs=-1) +@click.option( + "--suppress-output/--show-output", "suppress_output", default=False, show_default=True, help="Suppress output" +) +@click.option("--compile-commands", type=click.File("w"), default=None) +@click.option("--sandbox", default=False, is_flag=True) +@click.argument("build-args", nargs=-1) @default_options -def build_compile_commands(project: c.Project, suppress_output: bool, compile_commands, sandbox: bool, - build_args: List[str]): +def build_compile_commands( + project: c.Project, suppress_output: bool, compile_commands, sandbox: bool, build_args: List[str] +): """ Build a compile_commands.json compatible with cquery :return: """ analytics.send("build-compile-commands") - exit_code = project.make_scan_build(build_args, cdb_file=compile_commands, suppress_output=suppress_output, - sandbox=sandbox) + exit_code = project.make_scan_build( + build_args, cdb_file=compile_commands, suppress_output=suppress_output, sandbox=sandbox + ) if exit_code != 0: - if 
sys.platform == 'win32': + if sys.platform == "win32": kernel32 = ctypes.windll.kernel32 kernel32.SetConsoleMode(kernel32.GetStdHandle(-11), 7) - logger(__name__).error(f'Failed to make project: Exit Code {exit_code}', extra={'sentry': False}) - raise click.ClickException('Failed to build') + logger(__name__).error(f"Failed to make project: Exit Code {exit_code}", extra={"sentry": False}) + raise click.ClickException("Failed to build") return exit_code diff --git a/pros/cli/click_classes.py b/pros/cli/click_classes.py index b071c938..6eeda9de 100644 --- a/pros/cli/click_classes.py +++ b/pros/cli/click_classes.py @@ -1,11 +1,12 @@ from collections import defaultdict from typing import * -from rich_click import RichCommand import click.decorators from click import ClickException -from pros.conductor.project import Project as p +from rich_click import RichCommand + from pros.common.utils import get_version +from pros.conductor.project import Project as p class PROSFormatted(RichCommand): @@ -19,9 +20,9 @@ def __init__(self, *args, hidden: bool = False, **kwargs): def format_commands(self, ctx, formatter): """Extra format methods for multi methods that adds all the commands - after the options. - """ - if not hasattr(self, 'list_commands'): + after the options. + """ + if not hasattr(self, "list_commands"): return rows = [] for subcommand in self.list_commands(ctx): @@ -29,14 +30,14 @@ def format_commands(self, ctx, formatter): # What is this, the tool lied about a command. Ignore it if cmd is None: continue - if hasattr(cmd, 'hidden') and cmd.hidden: + if hasattr(cmd, "hidden") and cmd.hidden: continue - help = cmd.short_help or '' + help = cmd.short_help or "" rows.append((subcommand, help)) if rows: - with formatter.section('Commands'): + with formatter.section("Commands"): formatter.write_dl(rows) def format_options(self, ctx, formatter): @@ -45,15 +46,15 @@ def format_options(self, ctx, formatter): for param in self.get_params(ctx): rv = param.get_help_record(ctx) if rv is not None: - if hasattr(param, 'group'): + if hasattr(param, "group"): opts[param.group].append(rv) else: - opts['Options'].append(rv) + opts["Options"].append(rv) - if len(opts['Options']) > 0: - with formatter.section('Options'): - formatter.write_dl(opts['Options']) - opts.pop('Options') + if len(opts["Options"]) > 0: + with formatter.section("Options"): + formatter.write_dl(opts["Options"]) + opts.pop("Options") for group, options in opts.items(): with formatter.section(group): @@ -61,6 +62,7 @@ def format_options(self, ctx, formatter): self.format_commands(ctx, formatter) + class PROSCommand(PROSFormatted, click.Command): pass @@ -77,29 +79,31 @@ def __init__(self, *args, hidden: bool = False, group: str = None, **kwargs): self.group = group def get_help_record(self, ctx): - if hasattr(self, 'hidden') and self.hidden: + if hasattr(self, "hidden") and self.hidden: return return super().get_help_record(ctx) + class PROSDeprecated(click.Option): def __init__(self, *args, replacement: str = None, **kwargs): - kwargs['help'] = "This option has been deprecated." - if not replacement==None: - kwargs['help'] += " Its replacement is '--{}'".format(replacement) + kwargs["help"] = "This option has been deprecated." 
+ if not replacement == None: + kwargs["help"] += " Its replacement is '--{}'".format(replacement) super(PROSDeprecated, self).__init__(*args, **kwargs) self.group = "Deprecated" - self.optiontype = "flag" if str(self.type)=="BOOL" else "switch" + self.optiontype = "flag" if str(self.type) == "BOOL" else "switch" self.to_use = replacement - self.arg = args[0][len(args[0])-1] + self.arg = args[0][len(args[0]) - 1] self.msg = "The '{}' {} has been deprecated. Please use '--{}' instead." - if replacement==None: - self.msg = self.msg.split(".")[0]+"." + if replacement == None: + self.msg = self.msg.split(".")[0] + "." def type_cast_value(self, ctx, value): - if not value==self.default: - print("Warning! : "+self.msg.format(self.arg, self.optiontype, self.to_use)+"\n") + if not value == self.default: + print("Warning! : " + self.msg.format(self.arg, self.optiontype, self.to_use) + "\n") return value + class PROSGroup(PROSFormatted, click.Group): def __init__(self, *args, **kwargs): super(PROSGroup, self).__init__(*args, **kwargs) @@ -112,7 +116,7 @@ def decorator(f): for alias in aliases: self.cmd_dict[alias] = f.__name__ if len(args) == 0 else args[0] - cmd = super(PROSGroup, self).command(*args, cls=kwargs.pop('cls', PROSCommand), **kwargs)(f) + cmd = super(PROSGroup, self).command(*args, cls=kwargs.pop("cls", PROSCommand), **kwargs)(f) self.add_command(cmd) return cmd @@ -124,7 +128,7 @@ def group(self, aliases=None, *args, **kwargs): def decorator(f): for alias in aliases: self.cmd_dict[alias] = f.__name__ - cmd = super(PROSGroup, self).group(*args, cls=kwargs.pop('cls', PROSGroup), **kwargs)(f) + cmd = super(PROSGroup, self).group(*args, cls=kwargs.pop("cls", PROSGroup), **kwargs)(f) self.add_command(cmd) return cmd @@ -160,7 +164,7 @@ def invoke(self, *args, **kwargs): except ClickException as e: click.echo("PROS-CLI Version: {}".format(get_version())) isProject = p.find_project("") - if (isProject): #check if there is a project + if isProject: # check if there is a project curr_proj = p() click.echo("PROS-Kernel Version: {}".format(curr_proj.kernel)) - raise e \ No newline at end of file + raise e diff --git a/pros/cli/common.py b/pros/cli/common.py index 6c12fa06..c3dc8a9e 100644 --- a/pros/cli/common.py +++ b/pros/cli/common.py @@ -1,9 +1,10 @@ import click.core from pros.common.sentry import add_tag -from pros.ga.analytics import analytics -from pros.common.utils import * from pros.common.ui import echo +from pros.common.utils import * +from pros.ga.analytics import analytics + from .click_classes import * @@ -15,16 +16,24 @@ def callback(ctx: click.Context, param: click.core.Parameter, value: Any): if isinstance(value, str): value = getattr(logging, value.upper(), None) if not isinstance(value, int): - raise ValueError('Invalid log level: {}'.format(value)) + raise ValueError("Invalid log level: {}".format(value)) if value: logger().setLevel(min(logger().level, logging.INFO)) - stdout_handler = ctx.obj['click_handler'] # type: logging.Handler + stdout_handler = ctx.obj["click_handler"] # type: logging.Handler stdout_handler.setLevel(logging.INFO) - logger(__name__).info('Verbose messages enabled') + logger(__name__).info("Verbose messages enabled") return value - return click.option('--verbose', help='Enable verbose output', is_flag=True, is_eager=True, expose_value=False, - callback=callback, cls=PROSOption, group='Standard Options')(f) + return click.option( + "--verbose", + help="Enable verbose output", + is_flag=True, + is_eager=True, + expose_value=False, + callback=callback, + 
cls=PROSOption, + group="Standard Options", + )(f) def debug_option(f: Union[click.Command, Callable]): @@ -35,18 +44,26 @@ def callback(ctx: click.Context, param: click.core.Parameter, value: Any): if isinstance(value, str): value = getattr(logging, value.upper(), None) if not isinstance(value, int): - raise ValueError('Invalid log level: {}'.format(value)) + raise ValueError("Invalid log level: {}".format(value)) if value: logging.getLogger().setLevel(logging.DEBUG) - stdout_handler = ctx.obj['click_handler'] # type: logging.Handler + stdout_handler = ctx.obj["click_handler"] # type: logging.Handler stdout_handler.setLevel(logging.DEBUG) - logging.getLogger(__name__).info('Debugging messages enabled') - if logger('pros').isEnabledFor(logging.DEBUG): - logger('pros').debug(f'CLI Version: {get_version()}') + logging.getLogger(__name__).info("Debugging messages enabled") + if logger("pros").isEnabledFor(logging.DEBUG): + logger("pros").debug(f"CLI Version: {get_version()}") return value - return click.option('--debug', help='Enable debugging output', is_flag=True, is_eager=True, expose_value=False, - callback=callback, cls=PROSOption, group='Standard Options')(f) + return click.option( + "--debug", + help="Enable debugging output", + is_flag=True, + is_eager=True, + expose_value=False, + callback=callback, + cls=PROSOption, + group="Standard Options", + )(f) def logging_option(f: Union[click.Command, Callable]): @@ -57,15 +74,23 @@ def callback(ctx: click.Context, param: click.core.Parameter, value: Any): if isinstance(value, str): value = getattr(logging, value.upper(), None) if not isinstance(value, int): - raise ValueError('Invalid log level: {}'.format(value)) + raise ValueError("Invalid log level: {}".format(value)) logging.getLogger().setLevel(min(logger().level, value)) - stdout_handler = ctx.obj['click_handler'] # type: logging.Handler + stdout_handler = ctx.obj["click_handler"] # type: logging.Handler stdout_handler.setLevel(value) return value - return click.option('-l', '--log', help='Logging level', is_eager=True, expose_value=False, callback=callback, - type=click.Choice(['DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL']), - cls=PROSOption, group='Standard Options')(f) + return click.option( + "-l", + "--log", + help="Logging level", + is_eager=True, + expose_value=False, + callback=callback, + type=click.Choice(["DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"]), + cls=PROSOption, + group="Standard Options", + )(f) def logfile_option(f: Union[click.Command, Callable]): @@ -77,21 +102,27 @@ def callback(ctx: click.Context, param: click.core.Parameter, value: Any): if isinstance(value[1], str): level = getattr(logging, value[1].upper(), None) if not isinstance(level, int): - raise ValueError('Invalid log level: {}'.format(value[1])) - handler = logging.FileHandler(value[0], mode='w') - fmt_str = '%(name)s.%(funcName)s:%(levelname)s - %(asctime)s - %(message)s' + raise ValueError("Invalid log level: {}".format(value[1])) + handler = logging.FileHandler(value[0], mode="w") + fmt_str = "%(name)s.%(funcName)s:%(levelname)s - %(asctime)s - %(message)s" handler.setFormatter(logging.Formatter(fmt_str)) handler.setLevel(level) logging.getLogger().addHandler(handler) - stdout_handler = ctx.obj['click_handler'] # type: logging.Handler + stdout_handler = ctx.obj["click_handler"] # type: logging.Handler stdout_handler.setLevel(logging.getLogger().level) # pin stdout_handler to its current log level logging.getLogger().setLevel(min(logging.getLogger().level, level)) - return 
click.option('--logfile', help='Log messages to a file', is_eager=True, expose_value=False, - callback=callback, default=(None, None), - type=click.Tuple( - [click.Path(), click.Choice(['DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL'])] - ), cls=PROSOption, group='Standard Options')(f) + return click.option( + "--logfile", + help="Log messages to a file", + is_eager=True, + expose_value=False, + callback=callback, + default=(None, None), + type=click.Tuple([click.Path(), click.Choice(["DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"])]), + cls=PROSOption, + group="Standard Options", + )(f) def machine_output_option(f: Union[click.Command, Callable]): @@ -101,60 +132,96 @@ def machine_output_option(f: Union[click.Command, Callable]): def callback(ctx: click.Context, param: click.Parameter, value: str): ctx.ensure_object(dict) - add_tag('machine-output', value) # goes in sentry report + add_tag("machine-output", value) # goes in sentry report if value: ctx.obj[param.name] = value logging.getLogger().setLevel(logging.DEBUG) - stdout_handler = ctx.obj['click_handler'] # type: logging.Handler + stdout_handler = ctx.obj["click_handler"] # type: logging.Handler stdout_handler.setLevel(logging.DEBUG) - logging.getLogger(__name__).info('Debugging messages enabled') + logging.getLogger(__name__).info("Debugging messages enabled") return value - decorator = click.option('--machine-output', expose_value=False, is_flag=True, default=False, is_eager=True, - help='Enable machine friendly output.', callback=callback, cls=PROSOption, hidden=True)(f) + decorator = click.option( + "--machine-output", + expose_value=False, + is_flag=True, + default=False, + is_eager=True, + help="Enable machine friendly output.", + callback=callback, + cls=PROSOption, + hidden=True, + )(f) decorator.__name__ = f.__name__ return decorator + def no_sentry_option(f: Union[click.Command, Callable]): """ disables the sentry y/N prompt when an error/exception occurs """ + def callback(ctx: click.Context, param: click.Parameter, value: bool): ctx.ensure_object(dict) - add_tag('no-sentry',value) + add_tag("no-sentry", value) if value: pros.common.sentry.disable_prompt() - decorator = click.option('--no-sentry', expose_value=False, is_flag=True, default=True, is_eager=True, - help="Disable sentry reporting prompt.", callback=callback, cls=PROSOption, hidden=True)(f) + + decorator = click.option( + "--no-sentry", + expose_value=False, + is_flag=True, + default=True, + is_eager=True, + help="Disable sentry reporting prompt.", + callback=callback, + cls=PROSOption, + hidden=True, + )(f) decorator.__name__ = f.__name__ return decorator + def no_analytics(f: Union[click.Command, Callable]): """ Don't use analytics for this command """ + def callback(ctx: click.Context, param: click.Parameter, value: bool): ctx.ensure_object(dict) - add_tag('no-analytics',value) + add_tag("no-analytics", value) if value: echo("Not sending analytics for this command.\n") analytics.useAnalytics = False - pass - decorator = click.option('--no-analytics', expose_value=False, is_flag=True, default=False, is_eager=True, - help="Don't send analytics for this command.", callback=callback, cls=PROSOption, hidden=True)(f) + pass + + decorator = click.option( + "--no-analytics", + expose_value=False, + is_flag=True, + default=False, + is_eager=True, + help="Don't send analytics for this command.", + callback=callback, + cls=PROSOption, + hidden=True, + )(f) decorator.__name__ = f.__name__ return decorator + def default_options(f: Union[click.Command, Callable]): """ - 
combines verbosity, debug, machine output, no analytics, and no sentry options + combines verbosity, debug, machine output, no analytics, and no sentry options """ - decorator = debug_option(verbose_option(logging_option(logfile_option(machine_output_option(no_sentry_option(no_analytics(f))))))) + decorator = debug_option( + verbose_option(logging_option(logfile_option(machine_output_option(no_sentry_option(no_analytics(f)))))) + ) decorator.__name__ = f.__name__ return decorator -def template_query(arg_name='query', required: bool = False): +def template_query(arg_name="query", required: bool = False): """ provides a wrapper for conductor commands which require an optional query @@ -164,15 +231,16 @@ def template_query(arg_name='query', required: bool = False): def callback(ctx: click.Context, param: click.Parameter, value: Tuple[str, ...]): import pros.conductor as c + value = list(value) spec = None - if len(value) > 0 and not value[0].startswith('--'): + if len(value) > 0 and not value[0].startswith("--"): spec = value.pop(0) if not spec and required: - raise ValueError(f'A {arg_name} is required to perform this command') - query = c.BaseTemplate.create_query(spec, - **{value[i][2:]: value[i + 1] for i in - range(0, int(len(value) / 2) * 2, 2)}) + raise ValueError(f"A {arg_name} is required to perform this command") + query = c.BaseTemplate.create_query( + spec, **{value[i][2:]: value[i + 1] for i in range(0, int(len(value) / 2) * 2, 2)} + ) logger(__name__).debug(query) return query @@ -182,28 +250,37 @@ def wrapper(f: Union[click.Command, Callable]): return wrapper -def project_option(arg_name='project', required: bool = True, default: str = '.', allow_none: bool = False): +def project_option(arg_name="project", required: bool = True, default: str = ".", allow_none: bool = False): def callback(ctx: click.Context, param: click.Parameter, value: str): if allow_none and value is None: return None import pros.conductor as c + project_path = c.Project.find_project(value) if project_path is None: if allow_none: return None elif required: - raise click.UsageError(f'{os.path.abspath(value or ".")} is not inside a PROS project. ' - f'Execute this command from within a PROS project or specify it ' - f'with --project project/path') + raise click.UsageError( + f'{os.path.abspath(value or ".")} is not inside a PROS project. ' + f"Execute this command from within a PROS project or specify it " + f"with --project project/path" + ) else: return None return c.Project(project_path) def wrapper(f: Union[click.Command, Callable]): - return click.option(f'--{arg_name}', callback=callback, required=required, - default=default, type=click.Path(exists=True), show_default=True, - help='PROS Project directory or file')(f) + return click.option( + f"--{arg_name}", + callback=callback, + required=required, + default=default, + type=click.Path(exists=True), + show_default=True, + help="PROS Project directory or file", + )(f) return wrapper @@ -213,7 +290,7 @@ def wrapper(f: Union[click.Command, Callable]): if isinstance(f, click.Command): f.params.extend(p for p in command.params if p.name not in [p.name for p in command.params]) else: - if not hasattr(f, '__click_params__'): + if not hasattr(f, "__click_params__"): f.__click_params__ = [] f.__click_params__.extend(p for p in command.params if p.name not in [p.name for p in f.__click_params__]) return f @@ -242,56 +319,67 @@ def resolve_v5_port(port: Optional[str], type: str, quiet: bool = False) -> Tupl wireless interaction. 
""" from pros.serial.devices.vex import find_v5_ports + # If a port is specified manually, we'll just assume it's # not a joystick. is_joystick = False if not port: ports = find_v5_ports(type) - logger(__name__).debug('Ports: {}'.format(';'.join([str(p.__dict__) for p in ports]))) + logger(__name__).debug("Ports: {}".format(";".join([str(p.__dict__) for p in ports]))) if len(ports) == 0: if not quiet: - logger(__name__).error('No {0} ports were found! If you think you have a {0} plugged in, ' - 'run this command again with the --debug flag'.format('v5'), - extra={'sentry': False}) + logger(__name__).error( + "No {0} ports were found! If you think you have a {0} plugged in, " + "run this command again with the --debug flag".format("v5"), + extra={"sentry": False}, + ) return None, False if len(ports) > 1: if not quiet: - brain_id = click.prompt('Multiple {} Brains were found. Please choose one to upload the program: [{}]' - .format('v5', ' | '.join([p.product.split(' ')[-1] for p in ports])), - default=ports[0].product.split(' ')[-1], - show_default=False, - type=click.Choice([p.description.split(' ')[-1] for p in ports])) - port = [p.device for p in ports if p.description.split(' ')[-1] == brain_id][0] + brain_id = click.prompt( + "Multiple {} Brains were found. Please choose one to upload the program: [{}]".format( + "v5", " | ".join([p.product.split(" ")[-1] for p in ports]) + ), + default=ports[0].product.split(" ")[-1], + show_default=False, + type=click.Choice([p.description.split(" ")[-1] for p in ports]), + ) + port = [p.device for p in ports if p.description.split(" ")[-1] == brain_id][0] assert port in [p.device for p in ports] else: return None, False else: port = ports[0].device - is_joystick = type == 'user' and 'Controller' in ports[0].description - logger(__name__).info('Automatically selected {}'.format(port)) + is_joystick = type == "user" and "Controller" in ports[0].description + logger(__name__).info("Automatically selected {}".format(port)) return port, is_joystick def resolve_cortex_port(port: Optional[str], quiet: bool = False) -> Optional[str]: from pros.serial.devices.vex import find_cortex_ports + if not port: ports = find_cortex_ports() if len(ports) == 0: if not quiet: - logger(__name__).error('No {0} ports were found! If you think you have a {0} plugged in, ' - 'run this command again with the --debug flag'.format('cortex'), - extra={'sentry': False}) + logger(__name__).error( + "No {0} ports were found! If you think you have a {0} plugged in, " + "run this command again with the --debug flag".format("cortex"), + extra={"sentry": False}, + ) return None if len(ports) > 1: if not quiet: - port = click.prompt('Multiple {} ports were found. Please choose one: '.format('cortex'), - default=ports[0].device, - type=click.Choice([p.device for p in ports])) + port = click.prompt( + "Multiple {} ports were found. 
Please choose one: ".format("cortex"), + default=ports[0].device, + type=click.Choice([p.device for p in ports]), + ) assert port in [p.device for p in ports] else: return None else: port = ports[0].device - logger(__name__).info('Automatically selected {}'.format(port)) + logger(__name__).info("Automatically selected {}".format(port)) return port diff --git a/pros/cli/compile_commands/intercept-cc.py b/pros/cli/compile_commands/intercept-cc.py index 66026e54..7c50b48d 100644 --- a/pros/cli/compile_commands/intercept-cc.py +++ b/pros/cli/compile_commands/intercept-cc.py @@ -1,4 +1,4 @@ from libscanbuild.intercept import intercept_compiler_wrapper -if __name__ == '__main__': +if __name__ == "__main__": intercept_compiler_wrapper() diff --git a/pros/cli/conductor.py b/pros/cli/conductor.py index 00a0ecb5..8a06487e 100644 --- a/pros/cli/conductor.py +++ b/pros/cli/conductor.py @@ -13,7 +13,7 @@ def conductor_cli(): pass -@conductor_cli.group(cls=PROSGroup, aliases=['cond', 'c', 'conduct'], short_help='Perform project management for PROS') +@conductor_cli.group(cls=PROSGroup, aliases=["cond", "c", "conduct"], short_help="Perform project management for PROS") @default_options def conductor(): """ @@ -25,8 +25,11 @@ def conductor(): pass -@conductor.command(aliases=['download'], short_help='Fetch/Download a remote template', - context_settings={'ignore_unknown_options': True}) +@conductor.command( + aliases=["download"], + short_help="Fetch/Download a remote template", + context_settings={"ignore_unknown_options": True}, +) @template_query(required=True) @default_options def fetch(query: c.BaseTemplate): @@ -48,54 +51,84 @@ def fetch(query: c.BaseTemplate): template_file = query.identifier elif os.path.exists(query.name) and query.version is None: template_file = query.name - elif query.metadata.get('origin', None) == 'local': - if 'location' not in query.metadata: - logger(__name__).error('--location option is required for the local depot. Specify --location ') - logger(__name__).debug(f'Query options provided: {query.metadata}') + elif query.metadata.get("origin", None) == "local": + if "location" not in query.metadata: + logger(__name__).error("--location option is required for the local depot. 
Specify --location ") + logger(__name__).debug(f"Query options provided: {query.metadata}") return -1 - template_file = query.metadata['location'] + template_file = query.metadata["location"] - if template_file and (os.path.splitext(template_file)[1] in ['.zip'] or - os.path.exists(os.path.join(template_file, 'template.pros'))): + if template_file and ( + os.path.splitext(template_file)[1] in [".zip"] or os.path.exists(os.path.join(template_file, "template.pros")) + ): template = ExternalTemplate(template_file) - query.metadata['location'] = template_file + query.metadata["location"] = template_file depot = c.LocalDepot() - logger(__name__).debug(f'Template file found: {template_file}') + logger(__name__).debug(f"Template file found: {template_file}") else: if template_file: - logger(__name__).debug(f'Template file exists but is not a valid template: {template_file}') + logger(__name__).debug(f"Template file exists but is not a valid template: {template_file}") else: - logger(__name__).error(f'Template not found: {query.name}') + logger(__name__).error(f"Template not found: {query.name}") return -1 template = c.Conductor().resolve_template(query, allow_offline=False) - logger(__name__).debug(f'Template from resolved query: {template}') + logger(__name__).debug(f"Template from resolved query: {template}") if template is None: - logger(__name__).error(f'There are no templates matching {query}!') + logger(__name__).error(f"There are no templates matching {query}!") return -1 - depot = c.Conductor().get_depot(template.metadata['origin']) - logger(__name__).debug(f'Found depot: {depot}') + depot = c.Conductor().get_depot(template.metadata["origin"]) + logger(__name__).debug(f"Found depot: {depot}") # query.metadata contain all of the extra args that also go to the depot. 
There's no way for us to determine # whether the arguments are for the template or for the depot, so they share them - logger(__name__).debug(f'Additional depot and template args: {query.metadata}') + logger(__name__).debug(f"Additional depot and template args: {query.metadata}") c.Conductor().fetch_template(depot, template, **query.metadata) -@conductor.command(context_settings={'ignore_unknown_options': True}) -@click.option('--upgrade/--no-upgrade', 'upgrade_ok', default=True, help='Allow upgrading templates in a project') - -@click.option('--install/--no-install', 'install_ok', default=True, help='Allow installing templates in a project') -@click.option('--download/--no-download', 'download_ok', default=True, - help='Allow downloading templates or only allow local templates') -@click.option('--upgrade-user-files/--no-upgrade-user-files', 'force_user', default=False, - help='Replace all user files in a template') -@click.option('--force', 'force_system', default=False, is_flag=True, - help="Force all system files to be inserted into the project") -@click.option('--force-apply', 'force_apply', default=False, is_flag=True, - help="Force apply the template, disregarding if the template is already installed.") -@click.option('--remove-empty-dirs/--no-remove-empty-dirs', 'remove_empty_directories', is_flag=True, default=True, - help='Remove empty directories when removing files') -@click.option('--early-access/--no-early-access', '--early/--no-early', '-ea/-nea', 'early_access', '--beta/--no-beta', default=None, - help='Create a project using the PROS 4 kernel') +@conductor.command(context_settings={"ignore_unknown_options": True}) +@click.option("--upgrade/--no-upgrade", "upgrade_ok", default=True, help="Allow upgrading templates in a project") +@click.option("--install/--no-install", "install_ok", default=True, help="Allow installing templates in a project") +@click.option( + "--download/--no-download", + "download_ok", + default=True, + help="Allow downloading templates or only allow local templates", +) +@click.option( + "--upgrade-user-files/--no-upgrade-user-files", + "force_user", + default=False, + help="Replace all user files in a template", +) +@click.option( + "--force", + "force_system", + default=False, + is_flag=True, + help="Force all system files to be inserted into the project", +) +@click.option( + "--force-apply", + "force_apply", + default=False, + is_flag=True, + help="Force apply the template, disregarding if the template is already installed.", +) +@click.option( + "--remove-empty-dirs/--no-remove-empty-dirs", + "remove_empty_directories", + is_flag=True, + default=True, + help="Remove empty directories when removing files", +) +@click.option( + "--early-access/--no-early-access", + "--early/--no-early", + "-ea/-nea", + "early_access", + "--beta/--no-beta", + default=None, + help="Create a project using the PROS 4 kernel", +) @project_option() @template_query(required=True) @default_options @@ -109,17 +142,32 @@ def apply(project: c.Project, query: c.BaseTemplate, **kwargs): return c.Conductor().apply_template(project, identifier=query, **kwargs) -@conductor.command(aliases=['i', 'in'], context_settings={'ignore_unknown_options': True}) -@click.option('--upgrade/--no-upgrade', 'upgrade_ok', default=False) -@click.option('--download/--no-download', 'download_ok', default=True) -@click.option('--force-user', 'force_user', default=False, is_flag=True, - help='Replace all user files in a template') -@click.option('--force-system', '-f', 'force_system', default=False, 
is_flag=True, - help="Force all system files to be inserted into the project") -@click.option('--force-apply', 'force_apply', default=False, is_flag=True, - help="Force apply the template, disregarding if the template is already installed.") -@click.option('--remove-empty-dirs/--no-remove-empty-dirs', 'remove_empty_directories', is_flag=True, default=True, - help='Remove empty directories when removing files') +@conductor.command(aliases=["i", "in"], context_settings={"ignore_unknown_options": True}) +@click.option("--upgrade/--no-upgrade", "upgrade_ok", default=False) +@click.option("--download/--no-download", "download_ok", default=True) +@click.option("--force-user", "force_user", default=False, is_flag=True, help="Replace all user files in a template") +@click.option( + "--force-system", + "-f", + "force_system", + default=False, + is_flag=True, + help="Force all system files to be inserted into the project", +) +@click.option( + "--force-apply", + "force_apply", + default=False, + is_flag=True, + help="Force apply the template, disregarding if the template is already installed.", +) +@click.option( + "--remove-empty-dirs/--no-remove-empty-dirs", + "remove_empty_directories", + is_flag=True, + default=True, + help="Remove empty directories when removing files", +) @project_option() @template_query(required=True) @default_options @@ -134,19 +182,41 @@ def install(ctx: click.Context, **kwargs): return ctx.invoke(apply, install_ok=True, **kwargs) -@conductor.command(context_settings={'ignore_unknown_options': True}, aliases=['u']) -@click.option('--install/--no-install', 'install_ok', default=False) -@click.option('--download/--no-download', 'download_ok', default=True) -@click.option('--force-user', 'force_user', default=False, is_flag=True, - help='Replace all user files in a template') -@click.option('--force-system', '-f', 'force_system', default=False, is_flag=True, - help="Force all system files to be inserted into the project") -@click.option('--force-apply', 'force_apply', default=False, is_flag=True, - help="Force apply the template, disregarding if the template is already installed.") -@click.option('--remove-empty-dirs/--no-remove-empty-dirs', 'remove_empty_directories', is_flag=True, default=True, - help='Remove empty directories when removing files') -@click.option('--early-access/--no-early-access', '--early/--no-early', '-ea/-nea', 'early_access', '--beta/--no-beta', default=None, - help='Create a project using the PROS 4 kernel') +@conductor.command(context_settings={"ignore_unknown_options": True}, aliases=["u"]) +@click.option("--install/--no-install", "install_ok", default=False) +@click.option("--download/--no-download", "download_ok", default=True) +@click.option("--force-user", "force_user", default=False, is_flag=True, help="Replace all user files in a template") +@click.option( + "--force-system", + "-f", + "force_system", + default=False, + is_flag=True, + help="Force all system files to be inserted into the project", +) +@click.option( + "--force-apply", + "force_apply", + default=False, + is_flag=True, + help="Force apply the template, disregarding if the template is already installed.", +) +@click.option( + "--remove-empty-dirs/--no-remove-empty-dirs", + "remove_empty_directories", + is_flag=True, + default=True, + help="Remove empty directories when removing files", +) +@click.option( + "--early-access/--no-early-access", + "--early/--no-early", + "-ea/-nea", + "early_access", + "--beta/--no-beta", + default=None, + help="Create a project using the PROS 4 
kernel", +) @project_option() @template_query(required=False) @default_options @@ -160,115 +230,206 @@ def upgrade(ctx: click.Context, project: c.Project, query: c.BaseTemplate, **kwa analytics.send("upgrade-project") if not query.name: for template in project.templates.keys(): - click.secho(f'Upgrading {template}', color='yellow') - q = c.BaseTemplate.create_query(name=template, target=project.target, - supported_kernels=project.templates['kernel'].version) + click.secho(f"Upgrading {template}", color="yellow") + q = c.BaseTemplate.create_query( + name=template, target=project.target, supported_kernels=project.templates["kernel"].version + ) ctx.invoke(apply, upgrade_ok=True, project=project, query=q, **kwargs) else: ctx.invoke(apply, project=project, query=query, upgrade_ok=True, **kwargs) -@conductor.command('uninstall') -@click.option('--remove-user', is_flag=True, default=False, help='Also remove user files') -@click.option('--remove-empty-dirs/--no-remove-empty-dirs', 'remove_empty_directories', is_flag=True, default=True, - help='Remove empty directories when removing files') -@click.option('--no-make-clean', is_flag=True, default=True, help='Do not run make clean after removing') +@conductor.command("uninstall") +@click.option("--remove-user", is_flag=True, default=False, help="Also remove user files") +@click.option( + "--remove-empty-dirs/--no-remove-empty-dirs", + "remove_empty_directories", + is_flag=True, + default=True, + help="Remove empty directories when removing files", +) +@click.option("--no-make-clean", is_flag=True, default=True, help="Do not run make clean after removing") @project_option() @template_query() @default_options -def uninstall_template(project: c.Project, query: c.BaseTemplate, remove_user: bool, - remove_empty_directories: bool = False, no_make_clean: bool = False): +def uninstall_template( + project: c.Project, + query: c.BaseTemplate, + remove_user: bool, + remove_empty_directories: bool = False, + no_make_clean: bool = False, +): """ Uninstall a template from a PROS project Visit https://pros.cs.purdue.edu/v5/cli/conductor.html to learn more """ analytics.send("uninstall-template") - c.Conductor().remove_template(project, query, remove_user=remove_user, - remove_empty_directories=remove_empty_directories) + c.Conductor().remove_template( + project, query, remove_user=remove_user, remove_empty_directories=remove_empty_directories + ) if no_make_clean: with ui.Notification(): project.compile(["clean"]) -@conductor.command('new-project', aliases=['new', 'create-project']) -@click.argument('path', type=click.Path()) -@click.argument('target', default=c.Conductor().default_target, type=click.Choice(['v5', 'cortex'])) -@click.argument('version', default='latest') -@click.option('--force-user', 'force_user', default=False, is_flag=True, - help='Replace all user files in a template') -@click.option('--force-system', '-f', 'force_system', default=False, is_flag=True, - help="Force all system files to be inserted into the project") -@click.option('--force-refresh', is_flag=True, default=False, show_default=True, - help='Force update all remote depots, ignoring automatic update checks') -@click.option('--no-default-libs', 'no_default_libs', default=False, is_flag=True, - help='Do not install any default libraries after creating the project.') -@click.option('--compile-after', is_flag=True, default=True, show_default=True, - help='Compile the project after creation') -@click.option('--build-cache', is_flag=True, default=None, show_default=False, - help='Build 
compile commands cache after creation. Overrides --compile-after if both are specified.') -@click.option('--early-access/--no-early-access', '--early/--no-early', '-ea/-nea', 'early_access', '--beta/--no-beta', default=None, - help='Create a project using the PROS 4 kernel') +@conductor.command("new-project", aliases=["new", "create-project"]) +@click.argument("path", type=click.Path()) +@click.argument("target", default=c.Conductor().default_target, type=click.Choice(["v5", "cortex"])) +@click.argument("version", default="latest") +@click.option("--force-user", "force_user", default=False, is_flag=True, help="Replace all user files in a template") +@click.option( + "--force-system", + "-f", + "force_system", + default=False, + is_flag=True, + help="Force all system files to be inserted into the project", +) +@click.option( + "--force-refresh", + is_flag=True, + default=False, + show_default=True, + help="Force update all remote depots, ignoring automatic update checks", +) +@click.option( + "--no-default-libs", + "no_default_libs", + default=False, + is_flag=True, + help="Do not install any default libraries after creating the project.", +) +@click.option( + "--compile-after", is_flag=True, default=True, show_default=True, help="Compile the project after creation" +) +@click.option( + "--build-cache", + is_flag=True, + default=None, + show_default=False, + help="Build compile commands cache after creation. Overrides --compile-after if both are specified.", +) +@click.option( + "--early-access/--no-early-access", + "--early/--no-early", + "-ea/-nea", + "early_access", + "--beta/--no-beta", + default=None, + help="Create a project using the PROS 4 kernel", +) @click.pass_context @default_options -def new_project(ctx: click.Context, path: str, target: str, version: str, - force_user: bool = False, force_system: bool = False, - no_default_libs: bool = False, compile_after: bool = True, build_cache: bool = None, **kwargs): +def new_project( + ctx: click.Context, + path: str, + target: str, + version: str, + force_user: bool = False, + force_system: bool = False, + no_default_libs: bool = False, + compile_after: bool = True, + build_cache: bool = None, + **kwargs, +): """ Create a new PROS project Visit https://pros.cs.purdue.edu/v5/cli/conductor.html to learn more """ analytics.send("new-project") - version_source = version.lower() == 'latest' - if version.lower() == 'latest' or not version: - version = '>0' + version_source = version.lower() == "latest" + if version.lower() == "latest" or not version: + version = ">0" if not force_system and c.Project.find_project(path) is not None: - logger(__name__).error('A project already exists in this location at ' + c.Project.find_project(path) + - '! Delete it first. Are you creating a project in an existing one?', extra={'sentry': False}) + logger(__name__).error( + "A project already exists in this location at " + + c.Project.find_project(path) + + "! Delete it first. 
Are you creating a project in an existing one?", + extra={"sentry": False}, + ) ctx.exit(-1) try: _conductor = c.Conductor() if target is None: target = _conductor.default_target - project = _conductor.new_project(path, target=target, version=version, version_source=version_source, - force_user=force_user, force_system=force_system, - no_default_libs=no_default_libs, **kwargs) - ui.echo('New PROS Project was created:', output_machine=False) + project = _conductor.new_project( + path, + target=target, + version=version, + version_source=version_source, + force_user=force_user, + force_system=force_system, + no_default_libs=no_default_libs, + **kwargs, + ) + ui.echo("New PROS Project was created:", output_machine=False) ctx.invoke(info_project, project=project) if compile_after or build_cache: with ui.Notification(): - ui.echo('Building project...') + ui.echo("Building project...") exit_code = project.compile([], scan_build=build_cache) if exit_code != 0: - logger(__name__).error(f'Failed to make project: Exit Code {exit_code}', extra={'sentry': False}) - raise click.ClickException('Failed to build') + logger(__name__).error(f"Failed to make project: Exit Code {exit_code}", extra={"sentry": False}) + raise click.ClickException("Failed to build") except Exception as e: pros.common.logger(__name__).exception(e) ctx.exit(-1) -@conductor.command('query-templates', - aliases=['search-templates', 'ls-templates', 'lstemplates', 'querytemplates', 'searchtemplates', 'q'], - context_settings={'ignore_unknown_options': True}) -@click.option('--allow-offline/--no-offline', 'allow_offline', default=True, show_default=True, - help='(Dis)allow offline templates in the listing') -@click.option('--allow-online/--no-online', 'allow_online', default=True, show_default=True, - help='(Dis)allow online templates in the listing') -@click.option('--force-refresh', is_flag=True, default=False, show_default=True, - help='Force update all remote depots, ignoring automatic update checks') -@click.option('--limit', type=int, default=15, - help='The maximum number of displayed results for each library') -@click.option('--early-access/--no-early-access', '--early/--no-early', '-ea/-nea', 'early_access', '--beta/--no-beta', default=None, - help='View a list of early access templates') +@conductor.command( + "query-templates", + aliases=["search-templates", "ls-templates", "lstemplates", "querytemplates", "searchtemplates", "q"], + context_settings={"ignore_unknown_options": True}, +) +@click.option( + "--allow-offline/--no-offline", + "allow_offline", + default=True, + show_default=True, + help="(Dis)allow offline templates in the listing", +) +@click.option( + "--allow-online/--no-online", + "allow_online", + default=True, + show_default=True, + help="(Dis)allow online templates in the listing", +) +@click.option( + "--force-refresh", + is_flag=True, + default=False, + show_default=True, + help="Force update all remote depots, ignoring automatic update checks", +) +@click.option("--limit", type=int, default=15, help="The maximum number of displayed results for each library") +@click.option( + "--early-access/--no-early-access", + "--early/--no-early", + "-ea/-nea", + "early_access", + "--beta/--no-beta", + default=None, + help="View a list of early access templates", +) @template_query(required=False) @project_option(required=False) @click.pass_context @default_options -def query_templates(ctx, project: Optional[c.Project], query: c.BaseTemplate, allow_offline: bool, allow_online: bool, force_refresh: bool, - limit: int, 
early_access: bool): +def query_templates( + ctx, + project: Optional[c.Project], + query: c.BaseTemplate, + allow_offline: bool, + allow_online: bool, + force_refresh: bool, + limit: int, + early_access: bool, +): """ Query local and remote templates based on a spec @@ -279,34 +440,42 @@ def query_templates(ctx, project: Optional[c.Project], query: c.BaseTemplate, al limit = 15 if early_access is None and project is not None: early_access = project.use_early_access - templates = c.Conductor().resolve_templates(query, allow_offline=allow_offline, allow_online=allow_online, - force_refresh=force_refresh, early_access=early_access) + templates = c.Conductor().resolve_templates( + query, + allow_offline=allow_offline, + allow_online=allow_online, + force_refresh=force_refresh, + early_access=early_access, + ) render_templates = {} for template in templates: key = (template.identifier, template.origin) if key in render_templates: if isinstance(template, c.LocalTemplate): - render_templates[key]['local'] = True + render_templates[key]["local"] = True else: render_templates[key] = { - 'name': template.name, - 'version': template.version, - 'location': template.origin, - 'target': template.target, - 'local': isinstance(template, c.LocalTemplate) + "name": template.name, + "version": template.version, + "location": template.origin, + "target": template.target, + "local": isinstance(template, c.LocalTemplate), } import semantic_version as semver - render_templates = sorted(render_templates.values(), key=lambda k: (k['name'], semver.Version(k['version']), k['local']), reverse=True) + + render_templates = sorted( + render_templates.values(), key=lambda k: (k["name"], semver.Version(k["version"]), k["local"]), reverse=True + ) # Impose the output limit for each library's templates output_templates = [] - for _, g in groupby(render_templates, key=lambda t: t['name'] + t['target']): + for _, g in groupby(render_templates, key=lambda t: t["name"] + t["target"]): output_templates += list(g)[:limit] - ui.finalize('template-query', output_templates) + ui.finalize("template-query", output_templates) -@conductor.command('info-project') -@click.option('--ls-upgrades/--no-ls-upgrades', 'ls_upgrades', default=False) +@conductor.command("info-project") +@click.option("--ls-upgrades/--no-ls-upgrades", "ls_upgrades", default=False) @project_option() @default_options def info_project(project: c.Project, ls_upgrades): @@ -315,23 +484,28 @@ def info_project(project: c.Project, ls_upgrades): Visit https://pros.cs.purdue.edu/v5/cli/conductor.html to learn more """ - analytics.send("info-project") + analytics.send("info-project") from pros.conductor.project import ProjectReport + report = ProjectReport(project) _conductor = c.Conductor() if ls_upgrades: - for template in report.project['templates']: + for template in report.project["templates"]: import semantic_version as semver - templates = _conductor.resolve_templates(c.BaseTemplate.create_query(name=template["name"], - version=f'>{template["version"]}', - target=project.target)) + + templates = _conductor.resolve_templates( + c.BaseTemplate.create_query( + name=template["name"], version=f'>{template["version"]}', target=project.target + ) + ) template["upgrades"] = sorted({t.version for t in templates}, key=lambda v: semver.Version(v), reverse=True) - ui.finalize('project-report', report) + ui.finalize("project-report", report) + -@conductor.command('add-depot') -@click.argument('name') -@click.argument('url') +@conductor.command("add-depot") +@click.argument("name") 
+@click.argument("url") @default_options def add_depot(name: str, url: str): """ @@ -344,8 +518,9 @@ def add_depot(name: str, url: str): ui.echo(f"Added depot {name} from {url}") -@conductor.command('remove-depot') -@click.argument('name') + +@conductor.command("remove-depot") +@click.argument("name") @default_options def remove_depot(name: str): """ @@ -358,8 +533,9 @@ def remove_depot(name: str): ui.echo(f"Removed depot {name}") -@conductor.command('query-depots') -@click.option('--url', is_flag=True) + +@conductor.command("query-depots") +@click.option("--url", is_flag=True) @default_options def query_depots(url: bool): """ @@ -369,10 +545,11 @@ def query_depots(url: bool): """ _conductor = c.Conductor() ui.echo(f"Available Depots{' (Add --url for the url)' if not url else ''}:\n") - ui.echo('\n'.join(_conductor.query_depots(url))+"\n") + ui.echo("\n".join(_conductor.query_depots(url)) + "\n") -@conductor.command('reset') -@click.option('--force', is_flag=True, default=False, help='Force reset') + +@conductor.command("reset") +@click.option("--force", is_flag=True, default=False, help="Force reset") @default_options def reset(force: bool): """ @@ -382,12 +559,14 @@ def reset(force: bool): """ if not force: - if not ui.confirm("This will remove all depots and templates. You will be unable to create a new PROS project if you do not have internet connection. Are you sure you want to continue?"): + if not ui.confirm( + "This will remove all depots and templates. You will be unable to create a new PROS project if you do not have internet connection. Are you sure you want to continue?" + ): ui.echo("Aborting") return - + # Delete conductor.pros - file = os.path.join(click.get_app_dir('PROS'), 'conductor.pros') + file = os.path.join(click.get_app_dir("PROS"), "conductor.pros") if os.path.exists(file): os.remove(file) diff --git a/pros/cli/conductor_utils.py b/pros/cli/conductor_utils.py index cb22cffc..4f306030 100644 --- a/pros/cli/conductor_utils.py +++ b/pros/cli/conductor_utils.py @@ -6,28 +6,39 @@ from typing import * import click + import pros.common.ui as ui import pros.conductor as c from pros.common.utils import logger from pros.conductor.templates import ExternalTemplate from pros.ga.analytics import analytics + from .common import default_options, template_query from .conductor import conductor -@conductor.command('create-template', context_settings={'allow_extra_args': True, 'ignore_unknown_options': True}) -@click.argument('path', type=click.Path(exists=True)) -@click.argument('name') -@click.argument('version') -@click.option('--system', 'system_files', multiple=True, type=click.Path(), - help='Specify "system" files required by the template') -@click.option('--user', 'user_files', multiple=True, type=click.Path(), - help='Specify files that are intended to be modified by users') -@click.option('--kernels', 'supported_kernels', help='Specify supported kernels') -@click.option('--target', type=click.Choice(['v5', 'cortex']), help='Specify the target platform (cortex or v5)') -@click.option('--destination', type=click.Path(), - help='Specify an alternate destination for the created ZIP file or template descriptor') -@click.option('--zip/--no-zip', 'do_zip', default=True, help='Create a ZIP file or create a template descriptor.') +@conductor.command("create-template", context_settings={"allow_extra_args": True, "ignore_unknown_options": True}) +@click.argument("path", type=click.Path(exists=True)) +@click.argument("name") +@click.argument("version") +@click.option( + "--system", 
"system_files", multiple=True, type=click.Path(), help='Specify "system" files required by the template' +) +@click.option( + "--user", + "user_files", + multiple=True, + type=click.Path(), + help="Specify files that are intended to be modified by users", +) +@click.option("--kernels", "supported_kernels", help="Specify supported kernels") +@click.option("--target", type=click.Choice(["v5", "cortex"]), help="Specify the target platform (cortex or v5)") +@click.option( + "--destination", + type=click.Path(), + help="Specify an alternate destination for the created ZIP file or template descriptor", +) +@click.option("--zip/--no-zip", "do_zip", default=True, help="Create a ZIP file or create a template descriptor.") @default_options @click.pass_context def create_template(ctx, path: str, destination: str, do_zip: bool, **kwargs): @@ -60,112 +71,114 @@ def create_template(ctx, path: str, destination: str, do_zip: bool, **kwargs): if project: project = c.Project(project) path = project.location - if not kwargs['supported_kernels'] and kwargs['name'] != 'kernel': - kwargs['supported_kernels'] = f'^{project.kernel}' - kwargs['target'] = project.target + if not kwargs["supported_kernels"] and kwargs["name"] != "kernel": + kwargs["supported_kernels"] = f"^{project.kernel}" + kwargs["target"] = project.target if not destination: if os.path.isdir(path): destination = path else: destination = os.path.dirname(path) - kwargs['system_files'] = list(kwargs['system_files']) - kwargs['user_files'] = list(kwargs['user_files']) - kwargs['metadata'] = {ctx.args[i][2:]: ctx.args[i + 1] for i in range(0, int(len(ctx.args) / 2) * 2, 2)} + kwargs["system_files"] = list(kwargs["system_files"]) + kwargs["user_files"] = list(kwargs["user_files"]) + kwargs["metadata"] = {ctx.args[i][2:]: ctx.args[i + 1] for i in range(0, int(len(ctx.args) / 2) * 2, 2)} def get_matching_files(globs: List[str]) -> Set[str]: matching_files: List[str] = [] _path = os.path.normpath(path) + os.path.sep for g in [g for g in globs if glob.has_magic(g)]: - files = glob.glob(f'{path}/{g}', recursive=True) + files = glob.glob(f"{path}/{g}", recursive=True) files = filter(lambda f: os.path.isfile(f), files) files = [os.path.normpath(os.path.normpath(f).split(_path)[-1]) for f in files] matching_files.extend(files) # matches things like src/opcontrol.{c,cpp} so that we can expand to src/opcontrol.c and src/opcontrol.cpp - pattern = re.compile(r'^([\w{}]+.){{((?:\w+,)*\w+)}}$'.format(os.path.sep.replace('\\', '\\\\'))) + pattern = re.compile(r"^([\w{}]+.){{((?:\w+,)*\w+)}}$".format(os.path.sep.replace("\\", "\\\\"))) for f in [os.path.normpath(f) for f in globs if not glob.has_magic(f)]: if re.match(pattern, f): matches = re.split(pattern, f) - logger(__name__).debug(f'Matches on {f}: {matches}') - matching_files.extend([f'{matches[1]}{ext}' for ext in matches[2].split(',')]) + logger(__name__).debug(f"Matches on {f}: {matches}") + matching_files.extend([f"{matches[1]}{ext}" for ext in matches[2].split(",")]) else: matching_files.append(f) matching_files: Set[str] = set(matching_files) return matching_files - matching_system_files: Set[str] = get_matching_files(kwargs['system_files']) - matching_user_files: Set[str] = get_matching_files(kwargs['user_files']) + matching_system_files: Set[str] = get_matching_files(kwargs["system_files"]) + matching_user_files: Set[str] = get_matching_files(kwargs["user_files"]) matching_system_files: Set[str] = matching_system_files - matching_user_files # exclude existing project.pros and template.pros from the 
template, # and name@*.zip so that we don't redundantly include ZIPs - exclude_files = {'project.pros', 'template.pros', *get_matching_files([f"{kwargs['name']}@*.zip"])} + exclude_files = {"project.pros", "template.pros", *get_matching_files([f"{kwargs['name']}@*.zip"])} if project: exclude_files = exclude_files.union(project.list_template_files()) matching_system_files = matching_system_files - exclude_files matching_user_files = matching_user_files - exclude_files def filename_remap(file_path: str) -> str: - if os.path.dirname(file_path) == 'bin': - return file_path.replace('bin', 'firmware', 1) + if os.path.dirname(file_path) == "bin": + return file_path.replace("bin", "firmware", 1) return file_path - kwargs['system_files'] = list(map(filename_remap, matching_system_files)) - kwargs['user_files'] = list(map(filename_remap, matching_user_files)) + kwargs["system_files"] = list(map(filename_remap, matching_system_files)) + kwargs["user_files"] = list(map(filename_remap, matching_user_files)) if do_zip: - if not os.path.isdir(destination) and os.path.splitext(destination)[-1] != '.zip': - logger(__name__).error(f'{destination} must be a zip file or an existing directory.') + if not os.path.isdir(destination) and os.path.splitext(destination)[-1] != ".zip": + logger(__name__).error(f"{destination} must be a zip file or an existing directory.") return -1 with tempfile.TemporaryDirectory() as td: - template = ExternalTemplate(file=os.path.join(td, 'template.pros'), **kwargs) + template = ExternalTemplate(file=os.path.join(td, "template.pros"), **kwargs) template.save() if os.path.isdir(destination): - destination = os.path.join(destination, f'{template.identifier}.zip') - with zipfile.ZipFile(destination, mode='w') as z: - z.write(template.save_file, arcname='template.pros') + destination = os.path.join(destination, f"{template.identifier}.zip") + with zipfile.ZipFile(destination, mode="w") as z: + z.write(template.save_file, arcname="template.pros") for file in matching_user_files: source_path = os.path.join(path, file) dest_file = filename_remap(file) if os.path.exists(source_path): - ui.echo(f'U: {file}' + (f' -> {dest_file}' if file != dest_file else '')) - z.write(f'{path}/{file}', arcname=dest_file) + ui.echo(f"U: {file}" + (f" -> {dest_file}" if file != dest_file else "")) + z.write(f"{path}/{file}", arcname=dest_file) for file in matching_system_files: source_path = os.path.join(path, file) dest_file = filename_remap(file) if os.path.exists(source_path): - ui.echo(f'S: {file}' + (f' -> {dest_file}' if file != dest_file else '')) - z.write(f'{path}/{file}', arcname=dest_file) + ui.echo(f"S: {file}" + (f" -> {dest_file}" if file != dest_file else "")) + z.write(f"{path}/{file}", arcname=dest_file) else: if os.path.isdir(destination): - destination = os.path.join(destination, 'template.pros') + destination = os.path.join(destination, "template.pros") template = ExternalTemplate(file=destination, **kwargs) template.save() -@conductor.command('purge-template', help='Purge template(s) from the local cache', - context_settings={'ignore_unknown_options': True}) -@click.option('-f', '--force', is_flag=True, default=False, help='Do not prompt for removal of multiple templates') +@conductor.command( + "purge-template", help="Purge template(s) from the local cache", context_settings={"ignore_unknown_options": True} +) +@click.option("-f", "--force", is_flag=True, default=False, help="Do not prompt for removal of multiple templates") @template_query(required=False) @default_options def 
purge_template(query: c.BaseTemplate, force): analytics.send("purge-template") if not query: - force = click.confirm('Are you sure you want to remove all cached templates? This action is non-reversable!', - abort=True) + force = click.confirm( + "Are you sure you want to remove all cached templates? This action is non-reversable!", abort=True + ) cond = c.Conductor() templates = cond.resolve_templates(query, allow_online=False) beta_templates = cond.resolve_templates(query, allow_online=False, beta=True) if len(templates) == 0: - click.echo('No matching templates were found matching the spec.') + click.echo("No matching templates were found matching the spec.") return 0 t_list = [t.identifier for t in templates] + [t.identifier for t in beta_templates] - click.echo(f'The following template(s) will be removed {t_list}') + click.echo(f"The following template(s) will be removed {t_list}") if len(templates) > 1 and not force: - click.confirm(f'Are you sure you want to remove multiple templates?', abort=True) + click.confirm(f"Are you sure you want to remove multiple templates?", abort=True) for template in templates: if isinstance(template, c.LocalTemplate): cond.purge_template(template) diff --git a/pros/cli/interactive.py b/pros/cli/interactive.py index 634f1b2f..465f716f 100644 --- a/pros/cli/interactive.py +++ b/pros/cli/interactive.py @@ -1,10 +1,14 @@ import os from typing import * + import click + import pros.conductor as c -from .common import PROSGroup, default_options, project_option, pros_root from pros.ga.analytics import analytics +from .common import PROSGroup, default_options, project_option, pros_root + + @pros_root def interactive_cli(): pass @@ -17,11 +21,12 @@ def interactive(): @interactive.command() -@click.option('--directory', default=os.path.join(os.path.expanduser('~'), 'My PROS Project')) +@click.option("--directory", default=os.path.join(os.path.expanduser("~"), "My PROS Project")) @default_options def new_project(directory): from pros.common.ui.interactive.renderers import MachineOutputRenderer from pros.conductor.interactive.NewProjectModal import NewProjectModal + app = NewProjectModal(directory=directory) MachineOutputRenderer(app).run() @@ -32,6 +37,7 @@ def new_project(directory): def update_project(project: Optional[c.Project]): from pros.common.ui.interactive.renderers import MachineOutputRenderer from pros.conductor.interactive.UpdateProjectModal import UpdateProjectModal + app = UpdateProjectModal(project) MachineOutputRenderer(app).run() @@ -42,4 +48,5 @@ def update_project(project: Optional[c.Project]): def upload(project: Optional[c.Project]): from pros.common.ui.interactive.renderers import MachineOutputRenderer from pros.serial.interactive import UploadProjectModal + MachineOutputRenderer(UploadProjectModal(project)).run() diff --git a/pros/cli/main.py b/pros/cli/main.py index 8e4d6725..70a982dd 100644 --- a/pros/cli/main.py +++ b/pros/cli/main.py @@ -1,77 +1,76 @@ +import ctypes import logging - -# Setup analytics first because it is used by other files - import os.path - -import pros.common.sentry - -import click -import ctypes import sys -import pros.common.ui as ui -import pros.common.ui.log -from pros.cli.click_classes import * -from pros.cli.common import default_options, root_commands -from pros.common.utils import get_version, logger -from pros.ga.analytics import analytics - +import click import jsonpickle + import pros.cli.build import pros.cli.conductor import pros.cli.conductor_utils +import pros.cli.interactive +import 
pros.cli.misc_commands import pros.cli.terminal import pros.cli.upload -import pros.cli.v5_utils -import pros.cli.misc_commands -import pros.cli.interactive import pros.cli.user_script +import pros.cli.v5_utils +import pros.common.sentry +import pros.common.ui as ui +import pros.common.ui.log import pros.conductor as c +from pros.cli.click_classes import * +from pros.cli.common import default_options, root_commands +from pros.common.utils import get_version, logger +from pros.ga.analytics import analytics -if sys.platform == 'win32': +if sys.platform == "win32": kernel32 = ctypes.windll.kernel32 kernel32.SetConsoleMode(kernel32.GetStdHandle(-11), 7) root_sources = [ - 'build', - 'conductor', - 'conductor_utils', - 'terminal', - 'upload', - 'v5_utils', - 'misc_commands', # misc_commands must be after upload so that "pros u" is an alias for upload, not upgrade - 'interactive', - 'user_script' + "build", + "conductor", + "conductor_utils", + "terminal", + "upload", + "v5_utils", + "misc_commands", # misc_commands must be after upload so that "pros u" is an alias for upload, not upgrade + "interactive", + "user_script", ] -if getattr(sys, 'frozen', False): +if getattr(sys, "frozen", False): exe_file = sys.executable else: exe_file = __file__ -if os.path.exists(os.path.join(os.path.dirname(exe_file), os.pardir, os.pardir, '.git')): - root_sources.append('test') +if os.path.exists(os.path.join(os.path.dirname(exe_file), os.pardir, os.pardir, ".git")): + root_sources.append("test") -if os.path.exists(os.path.join(os.path.dirname(exe_file), os.pardir, os.pardir, '.git')): +if os.path.exists(os.path.join(os.path.dirname(exe_file), os.pardir, os.pardir, ".git")): import pros.cli.test for root_source in root_sources: - __import__(f'pros.cli.{root_source}') + __import__(f"pros.cli.{root_source}") def main(): try: ctx_obj = {} click_handler = pros.common.ui.log.PROSLogHandler(ctx_obj=ctx_obj) - ctx_obj['click_handler'] = click_handler - formatter = pros.common.ui.log.PROSLogFormatter('%(levelname)s - %(name)s:%(funcName)s - %(message)s - pros-cli version:{version}' - .format(version = get_version()), ctx_obj) + ctx_obj["click_handler"] = click_handler + formatter = pros.common.ui.log.PROSLogFormatter( + "%(levelname)s - %(name)s:%(funcName)s - %(message)s - pros-cli version:{version}".format( + version=get_version() + ), + ctx_obj, + ) click_handler.setFormatter(formatter) logging.basicConfig(level=logging.WARNING, handlers=[click_handler]) - cli.main(prog_name='pros', obj=ctx_obj, windows_expand_args=False) + cli.main(prog_name="pros", obj=ctx_obj, windows_expand_args=False) except KeyboardInterrupt: - click.echo('Aborted!') + click.echo("Aborted!") except Exception as e: logger(__name__).exception(e) @@ -80,10 +79,10 @@ def version(ctx: click.Context, param, value): if not value: return ctx.ensure_object(dict) - if ctx.obj.get('machine_output', False): + if ctx.obj.get("machine_output", False): ui.echo(get_version()) else: - ui.echo('pros, version {}'.format(get_version())) + ui.echo("pros, version {}".format(get_version())) ctx.exit(0) @@ -96,13 +95,16 @@ def use_analytics(ctx: click.Context, param, value): elif str(value).lower().startswith("f"): touse = False else: - ui.echo('Invalid argument provided for \'--use-analytics\'. Try \'--use-analytics=False\' or \'--use-analytics=True\'') + ui.echo( + "Invalid argument provided for '--use-analytics'. 
Try '--use-analytics=False' or '--use-analytics=True'" + ) ctx.exit(0) ctx.ensure_object(dict) analytics.set_use(touse) - ui.echo(f'Analytics usage set to: {analytics.useAnalytics}') + ui.echo(f"Analytics usage set to: {analytics.useAnalytics}") ctx.exit(0) - + + def use_early_access(ctx: click.Context, param, value): if value is None: return @@ -113,31 +115,47 @@ def use_early_access(ctx: click.Context, param, value): elif value.startswith("f") or value in ["0", "no", "n"]: conductor.use_early_access = False else: - ui.echo('Invalid argument provided for \'--use-early-access\'. Try \'--use-early-access=False\' or \'--use-early-access=True\'') + ui.echo( + "Invalid argument provided for '--use-early-access'. Try '--use-early-access=False' or '--use-early-access=True'" + ) ctx.exit(0) conductor.save() - ui.echo(f'Early access set to: {conductor.use_early_access}') + ui.echo(f"Early access set to: {conductor.use_early_access}") ctx.exit(0) -@click.command('pros', - cls=PROSCommandCollection, - sources=root_commands) +@click.command("pros", cls=PROSCommandCollection, sources=root_commands) @click.pass_context @default_options -@click.option('--version', help='Displays version and exits.', is_flag=True, expose_value=False, is_eager=True, - callback=version) -@click.option('--use-analytics', help='Set analytics usage (True/False).', type=str, expose_value=False, - is_eager=True, default=None, callback=use_analytics) -@click.option('--use-early-access', type=str, expose_value=False, is_eager=True, default=None, - help='Create projects with PROS 4 kernel by default', callback=use_early_access) +@click.option( + "--version", help="Displays version and exits.", is_flag=True, expose_value=False, is_eager=True, callback=version +) +@click.option( + "--use-analytics", + help="Set analytics usage (True/False).", + type=str, + expose_value=False, + is_eager=True, + default=None, + callback=use_analytics, +) +@click.option( + "--use-early-access", + type=str, + expose_value=False, + is_eager=True, + default=None, + help="Create projects with PROS 4 kernel by default", + callback=use_early_access, +) def cli(ctx): pros.common.sentry.register() ctx.call_on_close(after_command) + def after_command(): analytics.process_requests() -if __name__ == '__main__': +if __name__ == "__main__": main() diff --git a/pros/cli/misc_commands.py b/pros/cli/misc_commands.py index 8566456a..36fa6dd5 100644 --- a/pros/cli/misc_commands.py +++ b/pros/cli/misc_commands.py @@ -2,41 +2,52 @@ from pros.cli.common import * from pros.ga.analytics import analytics + @pros_root def misc_commands_cli(): pass @misc_commands_cli.command() -@click.option('--force-check', default=False, is_flag=True, - help='Force check for updates, disregarding auto-check frequency') -@click.option('--no-install', default=False, is_flag=True, - help='Only check if a new version is available, do not attempt to install') +@click.option( + "--force-check", default=False, is_flag=True, help="Force check for updates, disregarding auto-check frequency" +) +@click.option( + "--no-install", + default=False, + is_flag=True, + help="Only check if a new version is available, do not attempt to install", +) @default_options def upgrade(force_check, no_install): """ Check for updates to the PROS CLI """ with ui.Notification(): - ui.echo('The "pros upgrade" command is currently non-functioning. Did you mean to run "pros c upgrade"?', color='yellow') - - return # Dead code below - + ui.echo( + 'The "pros upgrade" command is currently non-functioning. 
Did you mean to run "pros c upgrade"?', + color="yellow", + ) + + return # Dead code below + analytics.send("upgrade") from pros.upgrade import UpgradeManager + manager = UpgradeManager() manifest = manager.get_manifest(force_check) ui.logger(__name__).debug(repr(manifest)) if manager.has_stale_manifest: - ui.logger(__name__).error('Failed to get latest upgrade information. ' - 'Try running with --debug for more information') + ui.logger(__name__).error( + "Failed to get latest upgrade information. " + "Try running with --debug for more information" + ) return -1 if not manager.needs_upgrade: - ui.finalize('upgradeInfo', 'PROS CLI is up to date') + ui.finalize("upgradeInfo", "PROS CLI is up to date") else: - ui.finalize('upgradeInfo', manifest) + ui.finalize("upgradeInfo", manifest) if not no_install: if not manager.can_perform_upgrade: - ui.logger(__name__).error(f'This manifest cannot perform the upgrade.') + ui.logger(__name__).error(f"This manifest cannot perform the upgrade.") return -3 - ui.finalize('upgradeComplete', manager.perform_upgrade()) + ui.finalize("upgradeComplete", manager.perform_upgrade()) diff --git a/pros/cli/terminal.py b/pros/cli/terminal.py index 2f05f2fe..ac6f25a0 100644 --- a/pros/cli/terminal.py +++ b/pros/cli/terminal.py @@ -1,17 +1,19 @@ import os import signal +import sys import time import click -import sys import pros.conductor as c import pros.serial.devices as devices -from pros.serial.ports import DirectPort from pros.common.utils import logger -from .common import default_options, resolve_v5_port, resolve_cortex_port, pros_root -from pros.serial.ports.v5_wireless_port import V5WirelessPort from pros.ga.analytics import analytics +from pros.serial.ports import DirectPort +from pros.serial.ports.v5_wireless_port import V5WirelessPort + +from .common import default_options, pros_root, resolve_cortex_port, resolve_v5_port + @pros_root def terminal_cli(): @@ -20,18 +22,25 @@ def terminal_cli(): @terminal_cli.command() @default_options -@click.argument('port', default='default') -@click.option('--backend', type=click.Choice(['share', 'solo']), default='solo', - help='Backend port of the terminal. See above for details') -@click.option('--raw', is_flag=True, default=False, - help='Don\'t process the data.') -@click.option('--hex', is_flag=True, default=False, help="Display data as hexadecimal values. Unaffected by --raw") -@click.option('--ports', nargs=2, type=int, default=(None, None), - help='Specify 2 ports for the "share" backend. The default option deterministically selects ports ' - 'based on the serial port name') -@click.option('--banner/--no-banner', 'request_banner', default=True) -@click.option('--output', nargs = 1, type=str, is_eager = True, help='Redirect terminal output to a file', default=None) - +@click.argument("port", default="default") +@click.option( + "--backend", + type=click.Choice(["share", "solo"]), + default="solo", + help="Backend port of the terminal. See above for details", +) +@click.option("--raw", is_flag=True, default=False, help="Don't process the data.") +@click.option("--hex", is_flag=True, default=False, help="Display data as hexadecimal values. Unaffected by --raw") +@click.option( + "--ports", + nargs=2, + type=int, + default=(None, None), + help='Specify 2 ports for the "share" backend. 
The default option deterministically selects ports ' + "based on the serial port name", +) +@click.option("--banner/--no-banner", "request_banner", default=True) +@click.option("--output", nargs=1, type=str, is_eager=True, help="Redirect terminal output to a file", default=None) def terminal(port: str, backend: str, **kwargs): """ Open a terminal to a serial port @@ -42,36 +51,37 @@ def terminal(port: str, backend: str, **kwargs): may be preferred when "share" doesn't perform adequately. Note: share backend is not yet implemented. - """ + """ analytics.send("terminal") from pros.serial.devices.vex.v5_user_device import V5UserDevice from pros.serial.terminal import Terminal + is_v5_user_joystick = False - if port == 'default': + if port == "default": project_path = c.Project.find_project(os.getcwd()) if project_path is None: - v5_port, is_v5_user_joystick = resolve_v5_port(None, 'user', quiet=True) + v5_port, is_v5_user_joystick = resolve_v5_port(None, "user", quiet=True) cortex_port = resolve_cortex_port(None, quiet=True) if ((v5_port is None) ^ (cortex_port is None)) or (v5_port is not None and v5_port == cortex_port): port = v5_port or cortex_port else: - raise click.UsageError('You must be in a PROS project directory to enable default port selecting') + raise click.UsageError("You must be in a PROS project directory to enable default port selecting") else: project = c.Project(project_path) port = project.target - if port == 'v5': + if port == "v5": port = None - port, is_v5_user_joystick = resolve_v5_port(port, 'user') - elif port == 'cortex': + port, is_v5_user_joystick = resolve_v5_port(port, "user") + elif port == "cortex": port = None port = resolve_cortex_port(port) - kwargs['raw'] = True + kwargs["raw"] = True if not port: return -1 - if backend == 'share': - raise NotImplementedError('Share backend is not yet implemented') + if backend == "share": + raise NotImplementedError("Share backend is not yet implemented") # ser = SerialSharePort(port) elif is_v5_user_joystick: logger(__name__).debug("it's a v5 joystick") @@ -79,31 +89,34 @@ def terminal(port: str, backend: str, **kwargs): else: logger(__name__).debug("not a v5 joystick") ser = DirectPort(port) - if kwargs.get('raw', False): + if kwargs.get("raw", False): device = devices.RawStreamDevice(ser) else: device = devices.vex.V5UserDevice(ser) - term = Terminal(device, request_banner=kwargs.pop('request_banner', True)) + term = Terminal(device, request_banner=kwargs.pop("request_banner", True)) class TerminalOutput(object): def __init__(self, file): self.terminal = sys.stdout - self.log = open(file, 'a') + self.log = open(file, "a") + def write(self, data): self.terminal.write(data) - self.log.write(data) + self.log.write(data) + def flush(self): pass + def end(self): self.log.close() output = None - if kwargs.get('output', None): - output_file = kwargs['output'] - output = TerminalOutput(f'{output_file}') + if kwargs.get("output", None): + output_file = kwargs["output"] + output = TerminalOutput(f"{output_file}") term.console.output = output sys.stdout = output - logger(__name__).info(f'Redirecting Terminal Output to File: {output_file}') + logger(__name__).info(f"Redirecting Terminal Output to File: {output_file}") else: sys.stdout = sys.__stdout__ @@ -117,4 +130,4 @@ def end(self): if output: output.end() term.join() - logger(__name__).info('CLI Main Thread Dying') + logger(__name__).info("CLI Main Thread Dying") diff --git a/pros/cli/upload.py b/pros/cli/upload.py index 5a50f3bd..9712bf8f 100644 --- a/pros/cli/upload.py +++ 
b/pros/cli/upload.py @@ -3,45 +3,116 @@ import pros.common.ui as ui import pros.conductor as c +from pros.ga.analytics import analytics from .common import * -from pros.ga.analytics import analytics + @pros_root def upload_cli(): pass -@upload_cli.command(aliases=['u']) -@click.option('--target', type=click.Choice(['v5', 'cortex']), default=None, required=False, - help='Specify the target microcontroller. Overridden when a PROS project is specified.') -@click.argument('path', type=click.Path(exists=True), default=None, required=False) -@click.argument('port', type=str, default=None, required=False) +@upload_cli.command(aliases=["u"]) +@click.option( + "--target", + type=click.Choice(["v5", "cortex"]), + default=None, + required=False, + help="Specify the target microcontroller. Overridden when a PROS project is specified.", +) +@click.argument("path", type=click.Path(exists=True), default=None, required=False) +@click.argument("port", type=str, default=None, required=False) @project_option(required=False, allow_none=True) -@click.option('--run-after/--no-run-after', 'run_after', default=None, help='Immediately run the uploaded program.', - cls=PROSDeprecated, replacement='after') -@click.option('--run-screen/--execute', 'run_screen', default=None, help='Display run program screen on the brain after upload.', - cls=PROSDeprecated, replacement='after') -@click.option('-af', '--after', type=click.Choice(['run','screen','none']), default=None, help='Action to perform on the brain after upload.', - cls=PROSOption, group='V5 Options') -@click.option('--quirk', type=int, default=0) -@click.option('--name', 'remote_name', type=str, default=None, required=False, help='Remote program name.', - cls=PROSOption, group='V5 Options') -@click.option('--slot', default=None, type=click.IntRange(min=1, max=8), help='Program slot on the GUI.', - cls=PROSOption, group='V5 Options') -@click.option('--icon', type=click.Choice(['pros','pizza','planet','alien','ufo','robot','clawbot','question','X','power']), default='pros', - help="Change Program's icon on the V5 Brain", cls=PROSOption, group='V5 Options') -@click.option('--program-version', default=None, type=str, help='Specify version metadata for program.', - cls=PROSOption, group='V5 Options', hidden=True) -@click.option('--ini-config', type=click.Path(exists=True), default=None, help='Specify a program configuration file.', - cls=PROSOption, group='V5 Options', hidden=True) -@click.option('--compress-bin/--no-compress-bin', 'compress_bin', cls=PROSOption, group='V5 Options', default=True, - help='Compress the program binary before uploading.') -@click.option('--description', default="Made with PROS", type=str, cls=PROSOption, group='V5 Options', - help='Change the description displayed for the program.') -@click.option('--name', default=None, type=str, cls=PROSOption, group='V5 Options', - help='Change the name of the program.') - +@click.option( + "--run-after/--no-run-after", + "run_after", + default=None, + help="Immediately run the uploaded program.", + cls=PROSDeprecated, + replacement="after", +) +@click.option( + "--run-screen/--execute", + "run_screen", + default=None, + help="Display run program screen on the brain after upload.", + cls=PROSDeprecated, + replacement="after", +) +@click.option( + "-af", + "--after", + type=click.Choice(["run", "screen", "none"]), + default=None, + help="Action to perform on the brain after upload.", + cls=PROSOption, + group="V5 Options", +) +@click.option("--quirk", type=int, default=0) +@click.option( + "--name", + 
"remote_name", + type=str, + default=None, + required=False, + help="Remote program name.", + cls=PROSOption, + group="V5 Options", +) +@click.option( + "--slot", + default=None, + type=click.IntRange(min=1, max=8), + help="Program slot on the GUI.", + cls=PROSOption, + group="V5 Options", +) +@click.option( + "--icon", + type=click.Choice(["pros", "pizza", "planet", "alien", "ufo", "robot", "clawbot", "question", "X", "power"]), + default="pros", + help="Change Program's icon on the V5 Brain", + cls=PROSOption, + group="V5 Options", +) +@click.option( + "--program-version", + default=None, + type=str, + help="Specify version metadata for program.", + cls=PROSOption, + group="V5 Options", + hidden=True, +) +@click.option( + "--ini-config", + type=click.Path(exists=True), + default=None, + help="Specify a program configuration file.", + cls=PROSOption, + group="V5 Options", + hidden=True, +) +@click.option( + "--compress-bin/--no-compress-bin", + "compress_bin", + cls=PROSOption, + group="V5 Options", + default=True, + help="Compress the program binary before uploading.", +) +@click.option( + "--description", + default="Made with PROS", + type=str, + cls=PROSOption, + group="V5 Options", + help="Change the description displayed for the program.", +) +@click.option( + "--name", default=None, type=str, cls=PROSOption, group="V5 Options", help="Change the name of the program." +) @default_options def upload(path: Optional[str], project: Optional[c.Project], port: str, **kwargs): """ @@ -56,155 +127,155 @@ def upload(path: Optional[str], project: Optional[c.Project], port: str, **kwarg analytics.send("upload") import pros.serial.devices.vex as vex from pros.serial.ports import DirectPort - kwargs['ide_version'] = project.kernel if not project==None else "None" - kwargs['ide'] = 'PROS' + + kwargs["ide_version"] = project.kernel if not project == None else "None" + kwargs["ide"] = "PROS" if path is None or os.path.isdir(path): if project is None: project_path = c.Project.find_project(path or os.getcwd()) if project_path is None: - raise click.UsageError('Specify a file to upload or set the cwd inside a PROS project') + raise click.UsageError("Specify a file to upload or set the cwd inside a PROS project") project = c.Project(project_path) path = os.path.join(project.location, project.output) - if project.target == 'v5' and not kwargs['remote_name']: - kwargs['remote_name'] = project.name + if project.target == "v5" and not kwargs["remote_name"]: + kwargs["remote_name"] = project.name # apply upload_options as a template options = dict(**project.upload_options) - if 'port' in options and port is None: - port = options.get('port', None) - if 'slot' in options and kwargs.get('slot', None) is None: - kwargs.pop('slot') - elif kwargs.get('slot', None) is None: - kwargs['slot'] = 1 - if 'icon' in options and kwargs.get('icon','pros') == 'pros': - kwargs.pop('icon') - if 'after' in options and kwargs.get('after','screen') is None: - kwargs.pop('after') + if "port" in options and port is None: + port = options.get("port", None) + if "slot" in options and kwargs.get("slot", None) is None: + kwargs.pop("slot") + elif kwargs.get("slot", None) is None: + kwargs["slot"] = 1 + if "icon" in options and kwargs.get("icon", "pros") == "pros": + kwargs.pop("icon") + if "after" in options and kwargs.get("after", "screen") is None: + kwargs.pop("after") options.update(kwargs) kwargs = options - kwargs['target'] = project.target # enforce target because uploading to the wrong uC is VERY bad - if 'program-version' in 
kwargs: - kwargs['version'] = kwargs['program-version'] - if 'remote_name' not in kwargs: - kwargs['remote_name'] = project.name + kwargs["target"] = project.target # enforce target because uploading to the wrong uC is VERY bad + if "program-version" in kwargs: + kwargs["version"] = kwargs["program-version"] + if "remote_name" not in kwargs: + kwargs["remote_name"] = project.name name_to_file = { - 'pros' : 'USER902x.bmp', - 'pizza' : 'USER003x.bmp', - 'planet' : 'USER013x.bmp', - 'alien' : 'USER027x.bmp', - 'ufo' : 'USER029x.bmp', - 'clawbot' : 'USER010x.bmp', - 'robot' : 'USER011x.bmp', - 'question' : 'USER002x.bmp', - 'power' : 'USER012x.bmp', - 'X' : 'USER001x.bmp' + "pros": "USER902x.bmp", + "pizza": "USER003x.bmp", + "planet": "USER013x.bmp", + "alien": "USER027x.bmp", + "ufo": "USER029x.bmp", + "clawbot": "USER010x.bmp", + "robot": "USER011x.bmp", + "question": "USER002x.bmp", + "power": "USER012x.bmp", + "X": "USER001x.bmp", } - kwargs['icon'] = name_to_file[kwargs['icon']] - if 'target' not in kwargs or kwargs['target'] is None: - logger(__name__).debug(f'Target not specified. Arguments provided: {kwargs}') - raise click.UsageError('Target not specified. specify a project (using the file argument) or target manually') - if kwargs['target'] == 'v5': - port = resolve_v5_port(port, 'system')[0] - elif kwargs['target'] == 'cortex': + kwargs["icon"] = name_to_file[kwargs["icon"]] + if "target" not in kwargs or kwargs["target"] is None: + logger(__name__).debug(f"Target not specified. Arguments provided: {kwargs}") + raise click.UsageError("Target not specified. specify a project (using the file argument) or target manually") + if kwargs["target"] == "v5": + port = resolve_v5_port(port, "system")[0] + elif kwargs["target"] == "cortex": port = resolve_cortex_port(port) else: logger(__name__).debug(f"Invalid target provided: {kwargs['target']}") logger(__name__).debug('Target should be one of ("v5" or "cortex").') if not port: - raise dont_send(click.UsageError('No port provided or located. Make sure to specify --target if needed.')) - if kwargs['target'] == 'v5': - kwargs['remote_name'] = kwargs['name'] if kwargs.get("name",None) else kwargs['remote_name'] - if kwargs['remote_name'] is None: - kwargs['remote_name'] = os.path.splitext(os.path.basename(path))[0] - kwargs['remote_name'] = kwargs['remote_name'].replace('@', '_') - kwargs['slot'] -= 1 - + raise dont_send(click.UsageError("No port provided or located. 
Make sure to specify --target if needed.")) + if kwargs["target"] == "v5": + kwargs["remote_name"] = kwargs["name"] if kwargs.get("name", None) else kwargs["remote_name"] + if kwargs["remote_name"] is None: + kwargs["remote_name"] = os.path.splitext(os.path.basename(path))[0] + kwargs["remote_name"] = kwargs["remote_name"].replace("@", "_") + kwargs["slot"] -= 1 + action_to_kwarg = { - 'run' : vex.V5Device.FTCompleteOptions.RUN_IMMEDIATELY, - 'screen' : vex.V5Device.FTCompleteOptions.RUN_SCREEN, - 'none' : vex.V5Device.FTCompleteOptions.DONT_RUN - } - after_upload_default = 'screen' - #Determine which FTCompleteOption to assign to run_after - if kwargs['after']==None: - kwargs['after']=after_upload_default - if kwargs['run_after']: - kwargs['after']='run' - elif kwargs['run_screen']==False and not kwargs['run_after']: - kwargs['after']='none' - kwargs['run_after'] = action_to_kwarg[kwargs['after']] - kwargs.pop('run_screen') - kwargs.pop('after') - elif kwargs['target'] == 'cortex': + "run": vex.V5Device.FTCompleteOptions.RUN_IMMEDIATELY, + "screen": vex.V5Device.FTCompleteOptions.RUN_SCREEN, + "none": vex.V5Device.FTCompleteOptions.DONT_RUN, + } + after_upload_default = "screen" + # Determine which FTCompleteOption to assign to run_after + if kwargs["after"] == None: + kwargs["after"] = after_upload_default + if kwargs["run_after"]: + kwargs["after"] = "run" + elif kwargs["run_screen"] == False and not kwargs["run_after"]: + kwargs["after"] = "none" + kwargs["run_after"] = action_to_kwarg[kwargs["after"]] + kwargs.pop("run_screen") + kwargs.pop("after") + elif kwargs["target"] == "cortex": pass - logger(__name__).debug('Arguments: {}'.format(str(kwargs))) + logger(__name__).debug("Arguments: {}".format(str(kwargs))) # Do the actual uploading! try: ser = DirectPort(port) device = None - if kwargs['target'] == 'v5': + if kwargs["target"] == "v5": device = vex.V5Device(ser) - elif kwargs['target'] == 'cortex': + elif kwargs["target"] == "cortex": device = vex.CortexDevice(ser).get_connected_device() if project is not None: device.upload_project(project, **kwargs) else: - with click.open_file(path, mode='rb') as pf: + with click.open_file(path, mode="rb") as pf: device.write_program(pf, **kwargs) except Exception as e: logger(__name__).exception(e, exc_info=True) exit(1) -@upload_cli.command('lsusb', aliases=['ls-usb', 'ls-devices', 'lsdev', 'list-usb', 'list-devices']) -@click.option('--target', type=click.Choice(['v5', 'cortex']), default=None, required=False) + +@upload_cli.command("lsusb", aliases=["ls-usb", "ls-devices", "lsdev", "list-usb", "list-devices"]) +@click.option("--target", type=click.Choice(["v5", "cortex"]), default=None, required=False) @default_options def ls_usb(target): """ List plugged in VEX Devices """ analytics.send("ls-usb") - from pros.serial.devices.vex import find_v5_ports, find_cortex_ports + from pros.serial.devices.vex import find_cortex_ports, find_v5_ports class PortReport(object): def __init__(self, header: str, ports: List[Any], machine_header: Optional[str] = None): self.header = header - self.ports = [{'device': p.device, 'desc': p.description} for p in ports] + self.ports = [{"device": p.device, "desc": p.description} for p in ports] self.machine_header = machine_header or header def __getstate__(self): - return { - 'device_type': self.machine_header, - 'devices': self.ports - } + return {"device_type": self.machine_header, "devices": self.ports} def __str__(self): if len(self.ports) == 0: - return f'There are no connected {self.header}' + return 
f"There are no connected {self.header}" else: port_str = "\n".join([f"{p['device']} - {p['desc']}" for p in self.ports]) - return f'{self.header}:\n{port_str}' + return f"{self.header}:\n{port_str}" result = [] - if target == 'v5' or target is None: - ports = find_v5_ports('system') - result.append(PortReport('VEX EDR V5 System Ports', ports, 'v5/system')) + if target == "v5" or target is None: + ports = find_v5_ports("system") + result.append(PortReport("VEX EDR V5 System Ports", ports, "v5/system")) - ports = find_v5_ports('User') - result.append(PortReport('VEX EDR V5 User Ports', ports, 'v5/user')) - if target == 'cortex' or target is None: + ports = find_v5_ports("User") + result.append(PortReport("VEX EDR V5 User Ports", ports, "v5/user")) + if target == "cortex" or target is None: ports = find_cortex_ports() - result.append(PortReport('VEX EDR Cortex Microcontroller Ports', ports, 'cortex')) + result.append(PortReport("VEX EDR Cortex Microcontroller Ports", ports, "cortex")) - ui.finalize('lsusb', result) + ui.finalize("lsusb", result) -@upload_cli.command('upload-terminal', aliases=['ut'], hidden=True) +@upload_cli.command("upload-terminal", aliases=["ut"], hidden=True) @shadow_command(upload) @click.pass_context def make_upload_terminal(ctx, **upload_kwargs): analytics.send("upload-terminal") from .terminal import terminal + ctx.invoke(upload, **upload_kwargs) ctx.invoke(terminal, request_banner=False) diff --git a/pros/cli/user_script.py b/pros/cli/user_script.py index be0f8259..e2e26d30 100644 --- a/pros/cli/user_script.py +++ b/pros/cli/user_script.py @@ -1,26 +1,29 @@ import click from pros.common import ui -from .common import default_options, pros_root from pros.ga.analytics import analytics +from .common import default_options, pros_root + + @pros_root def user_script_cli(): pass -@user_script_cli.command(short_help='Run user script files', hidden=True) -@click.argument('script_file') +@user_script_cli.command(short_help="Run user script files", hidden=True) +@click.argument("script_file") @default_options def user_script(script_file): """ Run a script file with the PROS CLI package """ analytics.send("user-script") - import os.path import importlib.util + import os.path + package_name = os.path.splitext(os.path.split(script_file)[0])[0] package_path = os.path.abspath(script_file) - ui.echo(f'Loading {package_name} from {package_path}') + ui.echo(f"Loading {package_name} from {package_path}") spec = importlib.util.spec_from_file_location(package_name, package_path) spec.loader.load_module() diff --git a/pros/cli/v5_utils.py b/pros/cli/v5_utils.py index a6fe0eec..f22cbe74 100644 --- a/pros/cli/v5_utils.py +++ b/pros/cli/v5_utils.py @@ -1,19 +1,21 @@ -from .common import * from pros.ga.analytics import analytics +from .common import * + + @pros_root def v5_utils_cli(): pass -@v5_utils_cli.group(cls=PROSGroup, help='Utilities for managing the VEX V5') +@v5_utils_cli.group(cls=PROSGroup, help="Utilities for managing the VEX V5") @default_options def v5(): pass @v5.command() -@click.argument('port', required=False, default=None) +@click.argument("port", required=False, default=None) @default_options def status(port: str): """ @@ -22,7 +24,8 @@ def status(port: str): analytics.send("status") from pros.serial.devices.vex import V5Device from pros.serial.ports import DirectPort - port = resolve_v5_port(port, 'system')[0] + + port = resolve_v5_port(port, "system")[0] if not port: return -1 @@ -31,17 +34,17 @@ def status(port: str): if ismachineoutput(): print(device.status) else: - 
print('Connected to V5 on {}'.format(port)) - print('System version:', device.status['system_version']) - print('CPU0 F/W version:', device.status['cpu0_version']) - print('CPU1 SDK version:', device.status['cpu1_version']) - print('System ID: 0x{:x}'.format(device.status['system_id'])) + print("Connected to V5 on {}".format(port)) + print("System version:", device.status["system_version"]) + print("CPU0 F/W version:", device.status["cpu0_version"]) + print("CPU1 SDK version:", device.status["cpu1_version"]) + print("System ID: 0x{:x}".format(device.status["system_id"])) -@v5.command('ls-files') -@click.option('--vid', type=int, default=1, cls=PROSOption, hidden=True) -@click.option('--options', type=int, default=0, cls=PROSOption, hidden=True) -@click.argument('port', required=False, default=None) +@v5.command("ls-files") +@click.option("--vid", type=int, default=1, cls=PROSOption, hidden=True) +@click.option("--options", type=int, default=0, cls=PROSOption, hidden=True) +@click.argument("port", required=False, default=None) @default_options def ls_files(port: str, vid: int, options: int): """ @@ -50,7 +53,8 @@ def ls_files(port: str, vid: int, options: int): analytics.send("ls-files") from pros.serial.devices.vex import V5Device from pros.serial.ports import DirectPort - port = resolve_v5_port(port, 'system')[0] + + port = resolve_v5_port(port, "system")[0] if not port: return -1 @@ -62,11 +66,11 @@ def ls_files(port: str, vid: int, options: int): @v5.command(hidden=True) -@click.argument('file_name') -@click.argument('port', required=False, default=None) -@click.argument('outfile', required=False, default=click.get_binary_stream('stdout'), type=click.File('wb')) -@click.option('--vid', type=int, default=1, cls=PROSOption, hidden=True) -@click.option('--source', type=click.Choice(['ddr', 'flash']), default='flash', cls=PROSOption, hidden=True) +@click.argument("file_name") +@click.argument("port", required=False, default=None) +@click.argument("outfile", required=False, default=click.get_binary_stream("stdout"), type=click.File("wb")) +@click.option("--vid", type=int, default=1, cls=PROSOption, hidden=True) +@click.option("--source", type=click.Choice(["ddr", "flash"]), default="flash", cls=PROSOption, hidden=True) @default_options def read_file(file_name: str, port: str, vid: int, source: str): """ @@ -75,33 +79,34 @@ def read_file(file_name: str, port: str, vid: int, source: str): analytics.send("read-file") from pros.serial.devices.vex import V5Device from pros.serial.ports import DirectPort - port = resolve_v5_port(port, 'system')[0] + + port = resolve_v5_port(port, "system")[0] if not port: return -1 ser = DirectPort(port) device = V5Device(ser) - device.read_file(file=click.get_binary_stream('stdout'), remote_file=file_name, - vid=vid, target=source) + device.read_file(file=click.get_binary_stream("stdout"), remote_file=file_name, vid=vid, target=source) @v5.command(hidden=True) -@click.argument('file', type=click.File('rb')) -@click.argument('port', required=False, default=None) -@click.option('--addr', type=int, default=0x03800000, required=False) -@click.option('--remote-file', required=False, default=None) -@click.option('--run-after/--no-run-after', 'run_after', default=False) -@click.option('--vid', type=int, default=1, cls=PROSOption, hidden=True) -@click.option('--target', type=click.Choice(['ddr', 'flash']), default='flash') +@click.argument("file", type=click.File("rb")) +@click.argument("port", required=False, default=None) +@click.option("--addr", type=int, 
default=0x03800000, required=False) +@click.option("--remote-file", required=False, default=None) +@click.option("--run-after/--no-run-after", "run_after", default=False) +@click.option("--vid", type=int, default=1, cls=PROSOption, hidden=True) +@click.option("--target", type=click.Choice(["ddr", "flash"]), default="flash") @default_options def write_file(file, port: str, remote_file: str, **kwargs): """ Write a file to the V5. """ analytics.send("write-file") - from pros.serial.ports import DirectPort from pros.serial.devices.vex import V5Device - port = resolve_v5_port(port, 'system')[0] + from pros.serial.ports import DirectPort + + port = resolve_v5_port(port, "system")[0] if not port: return -1 @@ -110,12 +115,17 @@ def write_file(file, port: str, remote_file: str, **kwargs): device.write_file(file=file, remote_file=remote_file or os.path.basename(file.name), **kwargs) -@v5.command('rm-file') -@click.argument('file_name') -@click.argument('port', required=False, default=None) -@click.option('--vid', type=int, default=1, cls=PROSOption, hidden=True) -@click.option('--erase-all/--erase-only', 'erase_all', default=False, show_default=True, - help='Erase all files matching base name.') +@v5.command("rm-file") +@click.argument("file_name") +@click.argument("port", required=False, default=None) +@click.option("--vid", type=int, default=1, cls=PROSOption, hidden=True) +@click.option( + "--erase-all/--erase-only", + "erase_all", + default=False, + show_default=True, + help="Erase all files matching base name.", +) @default_options def rm_file(file_name: str, port: str, vid: int, erase_all: bool): """ @@ -124,7 +134,8 @@ def rm_file(file_name: str, port: str, vid: int, erase_all: bool): analytics.send("rm-file") from pros.serial.devices.vex import V5Device from pros.serial.ports import DirectPort - port = resolve_v5_port(port, 'system')[0] + + port = resolve_v5_port(port, "system")[0] if not port: return -1 @@ -133,10 +144,10 @@ def rm_file(file_name: str, port: str, vid: int, erase_all: bool): device.erase_file(file_name, vid=vid, erase_all=erase_all) -@v5.command('cat-metadata') -@click.argument('file_name') -@click.argument('port', required=False, default=None) -@click.option('--vid', type=int, default=1, cls=PROSOption, hidden=True) +@v5.command("cat-metadata") +@click.argument("file_name") +@click.argument("port", required=False, default=None) +@click.option("--vid", type=int, default=1, cls=PROSOption, hidden=True) @default_options def cat_metadata(file_name: str, port: str, vid: int): """ @@ -145,7 +156,8 @@ def cat_metadata(file_name: str, port: str, vid: int): analytics.send("cat-metadata") from pros.serial.devices.vex import V5Device from pros.serial.ports import DirectPort - port = resolve_v5_port(port, 'system')[0] + + port = resolve_v5_port(port, "system")[0] if not port: return -1 @@ -153,10 +165,11 @@ def cat_metadata(file_name: str, port: str, vid: int): device = V5Device(ser) print(device.get_file_metadata_by_name(file_name, vid=vid)) -@v5.command('rm-program') -@click.argument('slot') -@click.argument('port', type=int, required=False, default=None) -@click.option('--vid', type=int, default=1, cls=PROSOption, hidden=True) + +@v5.command("rm-program") +@click.argument("slot") +@click.argument("port", type=int, required=False, default=None) +@click.option("--vid", type=int, default=1, cls=PROSOption, hidden=True) @default_options def rm_program(slot: int, port: str, vid: int): """ @@ -164,19 +177,21 @@ def rm_program(slot: int, port: str, vid: int): """ from 
pros.serial.devices.vex import V5Device from pros.serial.ports import DirectPort - port = resolve_v5_port(port, 'system')[0] + + port = resolve_v5_port(port, "system")[0] if not port: - return - 1 + return -1 - base_name = f'slot_{slot}' + base_name = f"slot_{slot}" ser = DirectPort(port) device = V5Device(ser) - device.erase_file(f'{base_name}.ini', vid=vid) - device.erase_file(f'{base_name}.bin', vid=vid) + device.erase_file(f"{base_name}.ini", vid=vid) + device.erase_file(f"{base_name}.bin", vid=vid) -@v5.command('rm-all') -@click.argument('port', required=False, default=None) -@click.option('--vid', type=int, default=1, hidden=True, cls=PROSOption) + +@v5.command("rm-all") +@click.argument("port", required=False, default=None) +@click.option("--vid", type=int, default=1, hidden=True, cls=PROSOption) @default_options def rm_all(port: str, vid: int): """ @@ -185,7 +200,8 @@ def rm_all(port: str, vid: int): analytics.send("rm-all") from pros.serial.devices.vex import V5Device from pros.serial.ports import DirectPort - port = resolve_v5_port(port, 'system')[0] + + port = resolve_v5_port(port, "system")[0] if not port: return -1 @@ -194,14 +210,14 @@ def rm_all(port: str, vid: int): c = device.get_dir_count(vid=vid) files = [] for i in range(0, c): - files.append(device.get_file_metadata_by_idx(i)['filename']) + files.append(device.get_file_metadata_by_idx(i)["filename"]) for file in files: device.erase_file(file, vid=vid) -@v5.command(short_help='Run a V5 Program') -@click.argument('slot', required=False, default=1, type=click.IntRange(1, 8)) -@click.argument('port', required=False, default=None) +@v5.command(short_help="Run a V5 Program") +@click.argument("slot", required=False, default=1, type=click.IntRange(1, 8)) +@click.argument("port", required=False, default=None) @default_options def run(slot: str, port: str): """ @@ -210,12 +226,14 @@ def run(slot: str, port: str): analytics.send("run") from pros.serial.devices.vex import V5Device from pros.serial.ports import DirectPort - file = f'slot_{slot}.bin' + + file = f"slot_{slot}.bin" import re - if not re.match(r'[\w\.]{1,24}', file): - logger(__name__).error('file must be a valid V5 filename') + + if not re.match(r"[\w\.]{1,24}", file): + logger(__name__).error("file must be a valid V5 filename") return 1 - port = resolve_v5_port(port, 'system')[0] + port = resolve_v5_port(port, "system")[0] if not port: return -1 ser = DirectPort(port) @@ -223,8 +241,8 @@ def run(slot: str, port: str): device.execute_program_file(file, run=True) -@v5.command(short_help='Stop a V5 Program') -@click.argument('port', required=False, default=None) +@v5.command(short_help="Stop a V5 Program") +@click.argument("port", required=False, default=None) @default_options def stop(port: str): """ @@ -234,29 +252,32 @@ def stop(port: str): """ from pros.serial.devices.vex import V5Device from pros.serial.ports import DirectPort - port = resolve_v5_port(port, 'system')[0] + + port = resolve_v5_port(port, "system")[0] if not port: return -1 ser = DirectPort(port) device = V5Device(ser) - device.execute_program_file('', run=False) + device.execute_program_file("", run=False) -@v5.command(short_help='Take a screen capture of the display') -@click.argument('file_name', required=False, default=None) -@click.argument('port', required=False, default=None) -@click.option('--force', is_flag=True, type=bool, default=False) +@v5.command(short_help="Take a screen capture of the display") +@click.argument("file_name", required=False, default=None) +@click.argument("port", 
required=False, default=None) +@click.option("--force", is_flag=True, type=bool, default=False) @default_options def capture(file_name: str, port: str, force: bool = False): """ Take a screen capture of the display """ + import os + + import png + from pros.serial.devices.vex import V5Device from pros.serial.ports import DirectPort - import png - import os - port = resolve_v5_port(port, 'system')[0] + port = resolve_v5_port(port, "system")[0] if not port: return -1 ser = DirectPort(port) @@ -264,62 +285,71 @@ def capture(file_name: str, port: str, force: bool = False): i_data, width, height = device.capture_screen() if i_data is None: - print('Failed to capture screen from connected brain.') + print("Failed to capture screen from connected brain.") return -1 # Sanity checking and default values for filenames if file_name is None: import time - time_s = time.strftime('%Y-%m-%d-%H%M%S') - file_name = f'{time_s}_{width}x{height}_pros_capture.png' - if file_name == '-': + + time_s = time.strftime("%Y-%m-%d-%H%M%S") + file_name = f"{time_s}_{width}x{height}_pros_capture.png" + if file_name == "-": # Send the data to stdout to allow for piping - print(i_data, end='') + print(i_data, end="") return - if not file_name.endswith('.png'): - file_name += '.png' + if not file_name.endswith(".png"): + file_name += ".png" if not force and os.path.exists(file_name): - print(f'{file_name} already exists. Refusing to overwrite!') - print('Re-run this command with the --force argument to overwrite existing files.') + print(f"{file_name} already exists. Refusing to overwrite!") + print("Re-run this command with the --force argument to overwrite existing files.") return -1 - with open(file_name, 'wb') as file_: + with open(file_name, "wb") as file_: w = png.Writer(width, height, greyscale=False) w.write(file_, i_data) - print(f'Saved screen capture to {file_name}') + print(f"Saved screen capture to {file_name}") + -@v5.command('set-variable', aliases=['sv', 'set', 'set_variable'], short_help='Set a kernel variable on a connected V5 device') -@click.argument('variable', type=click.Choice(['teamnumber', 'robotname']), required=True) -@click.argument('value', required=True, type=click.STRING, nargs=1) -@click.argument('port', type=str, default=None, required=False) +@v5.command( + "set-variable", aliases=["sv", "set", "set_variable"], short_help="Set a kernel variable on a connected V5 device" +) +@click.argument("variable", type=click.Choice(["teamnumber", "robotname"]), required=True) +@click.argument("value", required=True, type=click.STRING, nargs=1) +@click.argument("port", type=str, default=None, required=False) @default_options def set_variable(variable, value, port): import pros.serial.devices.vex as vex from pros.serial.ports import DirectPort # Get the connected v5 device - port = resolve_v5_port(port, 'system')[0] + port = resolve_v5_port(port, "system")[0] if port == None: return device = vex.V5Device(DirectPort(port)) actual_value = device.kv_write(variable, value).decode() - print(f'Value of \'{variable}\' set to : {actual_value}') + print(f"Value of '{variable}' set to : {actual_value}") + -@v5.command('read-variable', aliases=['rv', 'get', 'read_variable'], short_help='Read a kernel variable from a connected V5 device') -@click.argument('variable', type=click.Choice(['teamnumber', 'robotname']), required=True) -@click.argument('port', type=str, default=None, required=False) +@v5.command( + "read-variable", + aliases=["rv", "get", "read_variable"], + short_help="Read a kernel variable from a 
connected V5 device", +) +@click.argument("variable", type=click.Choice(["teamnumber", "robotname"]), required=True) +@click.argument("port", type=str, default=None, required=False) @default_options def read_variable(variable, port): import pros.serial.devices.vex as vex from pros.serial.ports import DirectPort # Get the connected v5 device - port = resolve_v5_port(port, 'system')[0] + port = resolve_v5_port(port, "system")[0] if port == None: return device = vex.V5Device(DirectPort(port)) value = device.kv_read(variable).decode() - print(f'Value of \'{variable}\' is : {value}') + print(f"Value of '{variable}' is : {value}") diff --git a/pros/common/sentry.py b/pros/common/sentry.py index 6c0c8690..57032478 100644 --- a/pros/common/sentry.py +++ b/pros/common/sentry.py @@ -5,18 +5,21 @@ import pros.common.ui as ui if TYPE_CHECKING: - from sentry_sdk import Client, Hub, Scope # noqa: F401, flake8 issue with "if TYPE_CHECKING" import jsonpickle.handlers # noqa: F401, flake8 issue, flake8 issue with "if TYPE_CHECKING" + from sentry_sdk import Client, Hub, Scope # noqa: F401, flake8 issue with "if TYPE_CHECKING" + from pros.config.cli_config import CliConfig # noqa: F401, flake8 issue, flake8 issue with "if TYPE_CHECKING" -cli_config: 'CliConfig' = None +cli_config: "CliConfig" = None force_prompt_off = False SUPPRESSED_EXCEPTIONS = [PermissionError, click.Abort] + def disable_prompt(): global force_prompt_off force_prompt_off = True + def prompt_to_send(event: Dict[str, Any], hint: Optional[Dict[str, Any]]) -> Optional[Dict[str, Any]]: """ Asks the user for permission to send data to Sentry @@ -26,39 +29,43 @@ def prompt_to_send(event: Dict[str, Any], hint: Optional[Dict[str, Any]]) -> Opt if cli_config is None or (cli_config.offer_sentry is not None and not cli_config.offer_sentry): return if force_prompt_off: - ui.logger(__name__).debug('Sentry prompt was forced off through click option') + ui.logger(__name__).debug("Sentry prompt was forced off through click option") return - if 'extra' in event and not event['extra'].get('sentry', True): - ui.logger(__name__).debug('Not sending candidate event because event was tagged with extra.sentry = False') + if "extra" in event and not event["extra"].get("sentry", True): + ui.logger(__name__).debug("Not sending candidate event because event was tagged with extra.sentry = False") return - if 'exc_info' in hint and (not getattr(hint['exc_info'][1], 'sentry', True) or - any(isinstance(hint['exc_info'][1], t) for t in SUPPRESSED_EXCEPTIONS)): - ui.logger(__name__).debug('Not sending candidate event because exception was tagged with sentry = False') + if "exc_info" in hint and ( + not getattr(hint["exc_info"][1], "sentry", True) + or any(isinstance(hint["exc_info"][1], t) for t in SUPPRESSED_EXCEPTIONS) + ): + ui.logger(__name__).debug("Not sending candidate event because exception was tagged with sentry = False") return - if not event['tags']: - event['tags'] = dict() - - extra_text = '' - if 'message' in event: - extra_text += event['message'] + '\n' - if 'culprit' in event: - extra_text += event['culprit'] + '\n' - if 'logentry' in event and 'message' in event['logentry']: - extra_text += event['logentry']['message'] + '\n' - if 'exc_info' in hint: + if not event["tags"]: + event["tags"] = dict() + + extra_text = "" + if "message" in event: + extra_text += event["message"] + "\n" + if "culprit" in event: + extra_text += event["culprit"] + "\n" + if "logentry" in event and "message" in event["logentry"]: + extra_text += event["logentry"]["message"] + 
"\n" + if "exc_info" in hint: import traceback - extra_text += ''.join(traceback.format_exception(*hint['exc_info'], limit=4)) - event['tags']['confirmed'] = ui.confirm('We detected something went wrong! Do you want to send a report?', - log=extra_text) - if event['tags']['confirmed']: - ui.echo('Sending bug report.') + extra_text += "".join(traceback.format_exception(*hint["exc_info"], limit=4)) + + event["tags"]["confirmed"] = ui.confirm( + "We detected something went wrong! Do you want to send a report?", log=extra_text + ) + if event["tags"]["confirmed"]: + ui.echo("Sending bug report.") ui.echo(f'Want to get updates? Visit https://pros.cs.purdue.edu/report.html?event={event["event_id"]}') return event else: - ui.echo('Not sending bug report.') + ui.echo("Not sending bug report.") def add_context(obj: object, override_handlers: bool = True, key: str = None) -> None: @@ -70,6 +77,7 @@ def add_context(obj: object, override_handlers: bool = True, key: str = None) -> """ import jsonpickle.handlers # noqa: F811, flake8 issue with "if TYPE_CHECKING" + from pros.conductor.templates import BaseTemplate class TemplateHandler(jsonpickle.handlers.BaseHandler): @@ -77,18 +85,19 @@ class TemplateHandler(jsonpickle.handlers.BaseHandler): Override how templates get pickled by JSON pickle - we don't want to send all of the data about a template from an object """ + from pros.conductor.templates import BaseTemplate def flatten(self, obj: BaseTemplate, data): rv = { - 'name': obj.name, - 'version': obj.version, - 'target': obj.target, + "name": obj.name, + "version": obj.version, + "target": obj.target, } - if hasattr(obj, 'location'): - rv['location'] = obj.location - if hasattr(obj, 'origin'): - rv['origin'] = obj.origin + if hasattr(obj, "location"): + rv["location"] = obj.location + if hasattr(obj, "origin"): + rv["origin"] = obj.origin return rv def restore(self, obj): @@ -98,6 +107,7 @@ def restore(self, obj): jsonpickle.handlers.register(BaseTemplate, TemplateHandler, base=True) from sentry_sdk import configure_scope + with configure_scope() as scope: scope.set_extra((key or obj.__class__.__qualname__), jsonpickle.pickler.Pickler(unpicklable=False).flatten(obj)) @@ -112,10 +122,11 @@ def add_tag(key: str, value: str): scope.set_tag(key, value) -def register(cfg: Optional['CliConfig'] = None): +def register(cfg: Optional["CliConfig"] = None): global cli_config, client if cfg is None: from pros.config.cli_config import cli_config as get_cli_config + cli_config = get_cli_config() else: cli_config = cfg @@ -126,17 +137,18 @@ def register(cfg: Optional['CliConfig'] = None): return import sentry_sdk as sentry + from pros.upgrade import get_platformv2 client = sentry.Client( - 'https://00bd27dcded6436cad5c8b2941d6a9d6@sentry.io/1226033', + "https://00bd27dcded6436cad5c8b2941d6a9d6@sentry.io/1226033", before_send=prompt_to_send, - release=ui.get_version() + release=ui.get_version(), ) sentry.Hub.current.bind_client(client) with sentry.configure_scope() as scope: - scope.set_tag('platformv2', get_platformv2().name) + scope.set_tag("platformv2", get_platformv2().name) -__all__ = ['add_context', 'register', 'add_tag'] +__all__ = ["add_context", "register", "add_tag"] diff --git a/pros/common/ui/__init__.py b/pros/common/ui/__init__.py index 24fcc71d..4b4963e4 100644 --- a/pros/common/ui/__init__.py +++ b/pros/common/ui/__init__.py @@ -12,32 +12,46 @@ def _machineoutput(obj: Dict[str, Any]): - click.echo(f'Uc&42BWAaQ{jsonpickle.dumps(obj, unpicklable=False, backend=_machine_pickler)}') + 
click.echo(f"Uc&42BWAaQ{jsonpickle.dumps(obj, unpicklable=False, backend=_machine_pickler)}") def _machine_notify(method: str, obj: Dict[str, Any], notify_value: Optional[int]): if notify_value is None: global _current_notify_value notify_value = _current_notify_value - obj['type'] = f'notify/{method}' - obj['notify_value'] = notify_value + obj["type"] = f"notify/{method}" + obj["notify_value"] = notify_value _machineoutput(obj) -def echo(text: Any, err: bool = False, nl: bool = True, notify_value: int = None, color: Any = None, - output_machine: bool = True, ctx: Optional[click.Context] = None): - add_breadcrumb(message=text, category='echo') +def echo( + text: Any, + err: bool = False, + nl: bool = True, + notify_value: int = None, + color: Any = None, + output_machine: bool = True, + ctx: Optional[click.Context] = None, +): + add_breadcrumb(message=text, category="echo") if ismachineoutput(ctx): if output_machine: - return _machine_notify('echo', {'text': str(text) + ('\n' if nl else '')}, notify_value) + return _machine_notify("echo", {"text": str(text) + ("\n" if nl else "")}, notify_value) else: return click.echo(str(text), nl=nl, err=err, color=color) -def confirm(text: str, default: bool = False, abort: bool = False, prompt_suffix: bool = ': ', - show_default: bool = True, err: bool = False, title: AnyStr = 'Please confirm:', - log: str = None): - add_breadcrumb(message=text, category='confirm') +def confirm( + text: str, + default: bool = False, + abort: bool = False, + prompt_suffix: bool = ": ", + show_default: bool = True, + err: bool = False, + title: AnyStr = "Please confirm:", + log: str = None, +): + add_breadcrumb(message=text, category="confirm") if ismachineoutput(): from pros.common.ui.interactive.ConfirmModal import ConfirmModal from pros.common.ui.interactive.renderers import MachineOutputRenderer @@ -45,37 +59,66 @@ def confirm(text: str, default: bool = False, abort: bool = False, prompt_suffix app = ConfirmModal(text, abort, title, log) rv = MachineOutputRenderer(app).run() else: - rv = click.confirm(text, default=default, abort=abort, prompt_suffix=prompt_suffix, - show_default=show_default, err=err) - add_breadcrumb(message=f'User responded: {rv}') + rv = click.confirm( + text, default=default, abort=abort, prompt_suffix=prompt_suffix, show_default=show_default, err=err + ) + add_breadcrumb(message=f"User responded: {rv}") return rv -def prompt(text, default=None, hide_input=False, - confirmation_prompt=False, type=None, - value_proc=None, prompt_suffix=': ', - show_default=True, err=False): +def prompt( + text, + default=None, + hide_input=False, + confirmation_prompt=False, + type=None, + value_proc=None, + prompt_suffix=": ", + show_default=True, + err=False, +): if ismachineoutput(): # TODO pass else: - return click.prompt(text, default=default, hide_input=hide_input, confirmation_prompt=confirmation_prompt, - type=type, value_proc=value_proc, prompt_suffix=prompt_suffix, show_default=show_default, - err=err) - - -def progressbar(iterable: Iterable = None, length: int = None, label: str = None, show_eta: bool = True, - show_percent: bool = True, show_pos: bool = False, item_show_func: Callable = None, - fill_char: str = '#', empty_char: str = '-', bar_template: str = '%(label)s [%(bar)s] %(info)s', - info_sep: str = ' ', width: int = 36): + return click.prompt( + text, + default=default, + hide_input=hide_input, + confirmation_prompt=confirmation_prompt, + type=type, + value_proc=value_proc, + prompt_suffix=prompt_suffix, + show_default=show_default, + 
err=err, + ) + + +def progressbar( + iterable: Iterable = None, + length: int = None, + label: str = None, + show_eta: bool = True, + show_percent: bool = True, + show_pos: bool = False, + item_show_func: Callable = None, + fill_char: str = "#", + empty_char: str = "-", + bar_template: str = "%(label)s [%(bar)s] %(info)s", + info_sep: str = " ", + width: int = 36, +): if ismachineoutput(): return _MachineOutputProgressBar(**locals()) else: return click.progressbar(**locals()) -def finalize(method: str, data: Union[str, Dict, object, List[Union[str, Dict, object, Tuple]]], - human_prefix: Optional[str] = None): +def finalize( + method: str, + data: Union[str, Dict, object, List[Union[str, Dict, object, Tuple]]], + human_prefix: Optional[str] = None, +): """ To all those who have to debug this... RIP """ @@ -86,34 +129,31 @@ def finalize(method: str, data: Union[str, Dict, object, List[Union[str, Dict, o human_readable = data elif isinstance(data, List): if len(data) == 0: - human_readable = '' + human_readable = "" elif isinstance(data[0], str): - human_readable = '\n'.join(data) + human_readable = "\n".join(data) elif isinstance(data[0], dict) or isinstance(data[0], object): - if hasattr(data[0], '__str__'): - human_readable = '\n'.join([str(d) for d in data]) + if hasattr(data[0], "__str__"): + human_readable = "\n".join([str(d) for d in data]) else: if not isinstance(data[0], dict): data = [d.__dict__ for d in data] import tabulate + human_readable = tabulate.tabulate([d.values() for d in data], headers=data[0].keys()) elif isinstance(data[0], tuple): import tabulate + human_readable = tabulate.tabulate(data[1:], headers=data[0]) else: human_readable = data - elif hasattr(data, '__str__'): + elif hasattr(data, "__str__"): human_readable = str(data) else: human_readable = data.__dict__ - human_readable = (human_prefix or '') + str(human_readable) + human_readable = (human_prefix or "") + str(human_readable) if ismachineoutput(): - _machineoutput({ - 'type': 'finalize', - 'method': method, - 'data': data, - 'human': human_readable - }) + _machineoutput({"type": "finalize", "method": method, "data": data, "human": human_readable}) else: echo(human_readable) @@ -121,8 +161,8 @@ def finalize(method: str, data: Union[str, Dict, object, List[Union[str, Dict, o class _MachineOutputProgressBar(_click_ProgressBar): def __init__(self, *args, **kwargs): global _current_notify_value - kwargs['file'] = open(os.devnull, 'w', encoding='UTF-8') - self.notify_value = kwargs.pop('notify_value', _current_notify_value) + kwargs["file"] = open(os.devnull, "w", encoding="UTF-8") + self.notify_value = kwargs.pop("notify_value", _current_notify_value) super(_MachineOutputProgressBar, self).__init__(*args, **kwargs) def __del__(self): @@ -130,10 +170,10 @@ def __del__(self): def render_progress(self): super(_MachineOutputProgressBar, self).render_progress() - obj = {'text': self.label, 'pct': self.pct} + obj = {"text": self.label, "pct": self.pct} if self.show_eta and self.eta_known and not self.finished: - obj['eta'] = self.eta - _machine_notify('progress', obj, self.notify_value) + obj["eta"] = self.eta + _machine_notify("progress", obj, self.notify_value) class Notification(object): @@ -166,26 +206,23 @@ def __init__(self, err: bool = False, ctx: Optional[click.Context] = None): threading.Thread.__init__(self) self.daemon = False self.fdRead, self.fdWrite = os.pipe() - self.pipeReader = os.fdopen(self.fdRead, encoding='UTF-8') + self.pipeReader = os.fdopen(self.fdRead, encoding="UTF-8") self.start() def 
fileno(self): - """Return the write file descriptor of the pipe - """ + """Return the write file descriptor of the pipe""" return self.fdWrite def run(self): - """Run the thread, logging everything. - """ - for line in iter(self.pipeReader.readline, ''): - echo(line.strip('\n'), ctx=self.click_ctx, err=self.is_err) + """Run the thread, logging everything.""" + for line in iter(self.pipeReader.readline, ""): + echo(line.strip("\n"), ctx=self.click_ctx, err=self.is_err) self.pipeReader.close() def close(self): - """Close the write end of the pipe. - """ + """Close the write end of the pipe.""" os.close(self.fdWrite) -__all__ = ['finalize', 'echo', 'confirm', 'prompt', 'progressbar', 'EchoPipe'] +__all__ = ["finalize", "echo", "confirm", "prompt", "progressbar", "EchoPipe"] diff --git a/pros/common/ui/interactive/ConfirmModal.py b/pros/common/ui/interactive/ConfirmModal.py index d4c59235..f444ec87 100644 --- a/pros/common/ui/interactive/ConfirmModal.py +++ b/pros/common/ui/interactive/ConfirmModal.py @@ -10,8 +10,8 @@ class ConfirmModal(application.Modal[bool]): In --machine-output mode, this Modal is run instead of a textual confirmation request (e.g. click.confirm()) """ - def __init__(self, text: str, abort: bool = False, title: AnyStr = 'Please confirm:', log: Optional[AnyStr] = None): - super().__init__(title, will_abort=abort, confirm_button='Yes', cancel_button='No', description=text) + def __init__(self, text: str, abort: bool = False, title: AnyStr = "Please confirm:", log: Optional[AnyStr] = None): + super().__init__(title, will_abort=abort, confirm_button="Yes", cancel_button="No", description=text) self.log = log def confirm(self): diff --git a/pros/common/ui/interactive/application.py b/pros/common/ui/interactive/application.py index 0db8dfaf..fb8bd85f 100644 --- a/pros/common/ui/interactive/application.py +++ b/pros/common/ui/interactive/application.py @@ -3,7 +3,7 @@ from .components import Component from .observable import Observable -P = TypeVar('P') +P = TypeVar("P") class Application(Observable, Generic[P]): @@ -22,7 +22,7 @@ def __del__(self): self.exit() def on_exit(self, *handlers: Callable): - return super(Application, self).on('end', *handlers) + return super(Application, self).on("end", *handlers) def exit(self, **kwargs): """ @@ -31,24 +31,24 @@ def exit(self, **kwargs): :arg return: set the return value before triggering exit. 
This value would be the value returned by Renderer.run(Application) """ - if 'return' in kwargs: - self.set_return(kwargs['return']) - self.trigger('end') + if "return" in kwargs: + self.set_return(kwargs["return"]) + self.trigger("end") def on_redraw(self, *handlers: Callable, **kwargs) -> Callable: - return super(Application, self).on('redraw', *handlers, **kwargs) + return super(Application, self).on("redraw", *handlers, **kwargs) def redraw(self) -> None: - self.trigger('redraw') + self.trigger("redraw") def set_return(self, value: P) -> None: """ Set the return value of Renderer.run(Application) """ - self.trigger('return', value) + self.trigger("return", value) def on_return_set(self, *handlers: Callable, **kwargs): - return super(Application, self).on('return', *handlers, **kwargs) + return super(Application, self).on("return", *handlers, **kwargs) @classmethod def get_hierarchy(cls, base: type) -> Optional[List[str]]: @@ -75,7 +75,7 @@ def __getstate__(self): return dict( etype=Application.get_hierarchy(self.__class__), elements=[e.__getstate__() for e in self.build()], - uuid=self.uuid + uuid=self.uuid, ) @@ -84,6 +84,7 @@ class Modal(Application[P], Generic[P]): An Application which is typically displayed in a pop-up box. It has a title, description, continue button, and cancel button. """ + # title of the modal to be displayed title: AnyStr # optional description displayed underneath the Modal @@ -96,9 +97,15 @@ class Modal(Application[P], Generic[P]): # Cancel button text cancel_button: AnyStr - def __init__(self, title: AnyStr, description: Optional[AnyStr] = None, - will_abort: bool = True, confirm_button: AnyStr = 'Continue', cancel_button: AnyStr = 'Cancel', - can_confirm: Optional[bool] = None): + def __init__( + self, + title: AnyStr, + description: Optional[AnyStr] = None, + will_abort: bool = True, + confirm_button: AnyStr = "Continue", + cancel_button: AnyStr = "Cancel", + can_confirm: Optional[bool] = None, + ): super().__init__() self.title = title self.description = description @@ -107,13 +114,13 @@ def __init__(self, title: AnyStr, description: Optional[AnyStr] = None, self.cancel_button = cancel_button self._can_confirm = can_confirm - self.on('confirm', self._confirm) + self.on("confirm", self._confirm) def on_cancel(): nonlocal self self.cancel() - self.on('cancel', on_cancel) + self.on("cancel", on_cancel) def confirm(self, *args, **kwargs): raise NotImplementedError() @@ -133,7 +140,7 @@ def build(self) -> Generator[Component, None, None]: def __getstate__(self): extra_state = {} if self.description is not None: - extra_state['description'] = self.description + extra_state["description"] = self.description return dict( **super(Modal, self).__getstate__(), **extra_state, diff --git a/pros/common/ui/interactive/components/__init__.py b/pros/common/ui/interactive/components/__init__.py index e470f931..419bc371 100644 --- a/pros/common/ui/interactive/components/__init__.py +++ b/pros/common/ui/interactive/components/__init__.py @@ -6,5 +6,17 @@ from .input_groups import ButtonGroup, DropDownBox from .label import Label, Spinner, VerbatimLabel -__all__ = ['Component', 'Button', 'Container', 'InputBox', 'ButtonGroup', 'DropDownBox', 'Label', - 'DirectorySelector', 'FileSelector', 'Checkbox', 'Spinner', 'VerbatimLabel'] +__all__ = [ + "Component", + "Button", + "Container", + "InputBox", + "ButtonGroup", + "DropDownBox", + "Label", + "DirectorySelector", + "FileSelector", + "Checkbox", + "Spinner", + "VerbatimLabel", +] diff --git 
a/pros/common/ui/interactive/components/button.py b/pros/common/ui/interactive/components/button.py index a3716158..184b930c 100644 --- a/pros/common/ui/interactive/components/button.py +++ b/pros/common/ui/interactive/components/button.py @@ -1,7 +1,7 @@ from typing import * -from .component import Component from ..observable import Observable +from .component import Component class Button(Component, Observable): @@ -14,11 +14,7 @@ def __init__(self, text: AnyStr): self.text = text def on_clicked(self, *handlers: Callable, **kwargs): - return self.on('clicked', *handlers, **kwargs) + return self.on("clicked", *handlers, **kwargs) def __getstate__(self) -> dict: - return dict( - **super(Button, self).__getstate__(), - text=self.text, - uuid=self.uuid - ) + return dict(**super(Button, self).__getstate__(), text=self.text, uuid=self.uuid) diff --git a/pros/common/ui/interactive/components/component.py b/pros/common/ui/interactive/components/component.py index 158fc0bc..500454f1 100644 --- a/pros/common/ui/interactive/components/component.py +++ b/pros/common/ui/interactive/components/component.py @@ -29,12 +29,10 @@ def get_hierarchy(cls, base: type) -> Optional[List[str]]: return None def __getstate__(self) -> Dict: - return dict( - etype=Component.get_hierarchy(self.__class__) - ) + return dict(etype=Component.get_hierarchy(self.__class__)) -P = TypeVar('P', bound=Parameter) +P = TypeVar("P", bound=Parameter) class ParameterizedComponent(Component, Generic[P]): @@ -48,10 +46,10 @@ def __init__(self, parameter: P): def __getstate__(self): extra_state = {} if isinstance(self.parameter, ValidatableParameter): - extra_state['valid'] = self.parameter.is_valid() + extra_state["valid"] = self.parameter.is_valid() reason = self.parameter.is_valid_reason() if reason: - extra_state['valid_reason'] = self.parameter.is_valid_reason() + extra_state["valid_reason"] = self.parameter.is_valid_reason() return dict( **super(ParameterizedComponent, self).__getstate__(), **extra_state, diff --git a/pros/common/ui/interactive/components/container.py b/pros/common/ui/interactive/components/container.py index 8b8615f4..b153c1ac 100644 --- a/pros/common/ui/interactive/components/container.py +++ b/pros/common/ui/interactive/components/container.py @@ -1,6 +1,7 @@ from typing import * from pros.common.ui.interactive.parameters import BooleanParameter + from .component import Component @@ -9,25 +10,24 @@ class Container(Component): A Container has multiple Components, possibly a title, and possibly a description """ - def __init__(self, *elements: Component, - title: Optional[AnyStr] = None, description: Optional[AnyStr] = None, - collapsed: Union[BooleanParameter, bool] = False): + def __init__( + self, + *elements: Component, + title: Optional[AnyStr] = None, + description: Optional[AnyStr] = None, + collapsed: Union[BooleanParameter, bool] = False + ): self.title = title self.description = description self.elements = elements self.collapsed = BooleanParameter(collapsed) if isinstance(collapsed, bool) else collapsed def __getstate__(self): - extra_state = { - 'uuid': self.collapsed.uuid, - 'collapsed': self.collapsed.value - } + extra_state = {"uuid": self.collapsed.uuid, "collapsed": self.collapsed.value} if self.title is not None: - extra_state['title'] = self.title + extra_state["title"] = self.title if self.description is not None: - extra_state['description'] = self.description + extra_state["description"] = self.description return dict( - **super(Container, self).__getstate__(), - **extra_state, - 
elements=[e.__getstate__() for e in self.elements] + **super(Container, self).__getstate__(), **extra_state, elements=[e.__getstate__() for e in self.elements] ) diff --git a/pros/common/ui/interactive/components/input.py b/pros/common/ui/interactive/components/input.py index 8d35b5e8..8a9a071a 100644 --- a/pros/common/ui/interactive/components/input.py +++ b/pros/common/ui/interactive/components/input.py @@ -15,7 +15,7 @@ def __init__(self, label: AnyStr, parameter: P, placeholder: Optional = None): def __getstate__(self) -> dict: extra_state = {} if self.placeholder is not None: - extra_state['placeholder'] = self.placeholder + extra_state["placeholder"] = self.placeholder return dict( **super(InputBox, self).__getstate__(), **extra_state, diff --git a/pros/common/ui/interactive/components/input_groups.py b/pros/common/ui/interactive/components/input_groups.py index 93171cfd..b8dad1a8 100644 --- a/pros/common/ui/interactive/components/input_groups.py +++ b/pros/common/ui/interactive/components/input_groups.py @@ -1,13 +1,11 @@ from pros.common.ui.interactive.parameters.misc_parameters import OptionParameter + from .component import BasicParameterizedComponent class DropDownBox(BasicParameterizedComponent[OptionParameter]): def __getstate__(self): - return dict( - **super(DropDownBox, self).__getstate__(), - options=self.parameter.options - ) + return dict(**super(DropDownBox, self).__getstate__(), options=self.parameter.options) class ButtonGroup(DropDownBox): diff --git a/pros/common/ui/interactive/components/label.py b/pros/common/ui/interactive/components/label.py index 8b060300..df06ec95 100644 --- a/pros/common/ui/interactive/components/label.py +++ b/pros/common/ui/interactive/components/label.py @@ -8,16 +8,14 @@ def __init__(self, text: AnyStr): self.text = text def __getstate__(self): - return dict( - **super(Label, self).__getstate__(), - text=self.text - ) + return dict(**super(Label, self).__getstate__(), text=self.text) class VerbatimLabel(Label): """ Should be displayed with a monospace font """ + pass @@ -27,4 +25,4 @@ class Spinner(Label): """ def __init__(self): - super(Spinner, self).__init__('Loading...') + super(Spinner, self).__init__("Loading...") diff --git a/pros/common/ui/interactive/observable.py b/pros/common/ui/interactive/observable.py index ec8b0855..61e00178 100644 --- a/pros/common/ui/interactive/observable.py +++ b/pros/common/ui/interactive/observable.py @@ -25,11 +25,16 @@ def notify(cls, uuid, event, *args, **kwargs): if uuid in _uuid_table: _uuid_table[uuid].trigger(event, *args, **kwargs) else: - logger(__name__).warning(f'Could not find an Observable to notify with UUID: {uuid}', sentry=True) + logger(__name__).warning(f"Could not find an Observable to notify with UUID: {uuid}", sentry=True) - def on(self, event, *handlers, - bound_args: Tuple[Any, ...] = None, bound_kwargs: Dict[str, Any] = None, - asynchronous: bool = False) -> Callable: + def on( + self, + event, + *handlers, + bound_args: Tuple[Any, ...] = None, + bound_kwargs: Dict[str, Any] = None, + asynchronous: bool = False, + ) -> Callable: """ Sets up a callable to be called whenenver "event" is triggered :param event: Event to bind to. Most classes expose an e.g. 
"on_changed" wrapper which provides the correct @@ -49,16 +54,21 @@ def on(self, event, *handlers, bound_kwargs = {} if asynchronous: + def bind(h): def bound(*args, **kw): from threading import Thread + from pros.common.utils import with_click_context + t = Thread(target=with_click_context(h), args=(*bound_args, *args), kwargs={**bound_kwargs, **kw}) t.start() return t return bound + else: + def bind(h): @wraps(h) def bound(*args, **kw): diff --git a/pros/common/ui/interactive/parameters/__init__.py b/pros/common/ui/interactive/parameters/__init__.py index 55c5dafe..9185027b 100644 --- a/pros/common/ui/interactive/parameters/__init__.py +++ b/pros/common/ui/interactive/parameters/__init__.py @@ -2,5 +2,11 @@ from .parameter import Parameter from .validatable_parameter import AlwaysInvalidParameter, ValidatableParameter -__all__ = ['Parameter', 'OptionParameter', 'BooleanParameter', 'ValidatableParameter', 'RangeParameter', - 'AlwaysInvalidParameter'] +__all__ = [ + "Parameter", + "OptionParameter", + "BooleanParameter", + "ValidatableParameter", + "RangeParameter", + "AlwaysInvalidParameter", +] diff --git a/pros/common/ui/interactive/parameters/misc_parameters.py b/pros/common/ui/interactive/parameters/misc_parameters.py index f19edba9..64cc2a38 100644 --- a/pros/common/ui/interactive/parameters/misc_parameters.py +++ b/pros/common/ui/interactive/parameters/misc_parameters.py @@ -3,7 +3,7 @@ from pros.common.ui.interactive.parameters.parameter import Parameter from pros.common.ui.interactive.parameters.validatable_parameter import ValidatableParameter -T = TypeVar('T') +T = TypeVar("T") class OptionParameter(ValidatableParameter, Generic[T]): @@ -17,8 +17,8 @@ def validate(self, value: Any): class BooleanParameter(Parameter[bool]): def update(self, new_value): - true_prefixes = ['T', 'Y'] - true_matches = ['1'] + true_prefixes = ["T", "Y"] + true_matches = ["1"] v = str(new_value).upper() is_true = v in true_matches or any(v.startswith(p) for p in true_prefixes) super(BooleanParameter, self).update(is_true) @@ -33,7 +33,7 @@ def validate(self, value: T): if self.range[0] <= value <= self.range[1]: return True else: - return f'{value} is not within [{self.range[0]}, {self.range[1]}]' + return f"{value} is not within [{self.range[0]}, {self.range[1]}]" def update(self, new_value): super(RangeParameter, self).update(int(new_value)) diff --git a/pros/common/ui/interactive/parameters/parameter.py b/pros/common/ui/interactive/parameters/parameter.py index 1c11eb5e..c1412e9a 100644 --- a/pros/common/ui/interactive/parameters/parameter.py +++ b/pros/common/ui/interactive/parameters/parameter.py @@ -2,7 +2,7 @@ from pros.common.ui.interactive.observable import Observable -T = TypeVar('T') +T = TypeVar("T") class Parameter(Observable, Generic[T]): @@ -17,11 +17,11 @@ def __init__(self, initial_value: T): super().__init__() self.value = initial_value - self.on('update', self.update) + self.on("update", self.update) def update(self, new_value): self.value = new_value - self.trigger('changed', self) + self.trigger("changed", self) def on_changed(self, *handlers: Callable, **kwargs): - return self.on('changed', *handlers, **kwargs) + return self.on("changed", *handlers, **kwargs) diff --git a/pros/common/ui/interactive/parameters/validatable_parameter.py b/pros/common/ui/interactive/parameters/validatable_parameter.py index ceafd59f..e631a045 100644 --- a/pros/common/ui/interactive/parameters/validatable_parameter.py +++ b/pros/common/ui/interactive/parameters/validatable_parameter.py @@ -2,7 +2,7 @@ 
from pros.common.ui.interactive.parameters.parameter import Parameter -T = TypeVar('T') +T = TypeVar("T") class ValidatableParameter(Parameter, Generic[T]): @@ -13,8 +13,12 @@ class ValidatableParameter(Parameter, Generic[T]): the callback get invoked. This event tag is "changed_validated" """ - def __init__(self, initial_value: T, allow_invalid_input: bool = True, - validate: Optional[Callable[[T], Union[bool, str]]] = None): + def __init__( + self, + initial_value: T, + allow_invalid_input: bool = True, + validate: Optional[Callable[[T], Union[bool, str]]] = None, + ): """ :param allow_invalid_input: Allow invalid input to be propagated to the `changed` event """ @@ -40,19 +44,19 @@ def update(self, new_value): if self.allow_invalid_input or self.is_valid(new_value): super(ValidatableParameter, self).update(new_value) if self.is_valid(): - self.trigger('changed_validated', self) + self.trigger("changed_validated", self) def on_changed(self, *handlers: Callable, **kwargs): """ Subscribe to event whenever value validly changes """ - return self.on('changed_validated', *handlers, **kwargs) + return self.on("changed_validated", *handlers, **kwargs) def on_any_changed(self, *handlers: Callable, **kwargs): """ Subscribe to event whenever value changes (regardless of whether or not new value is valid) """ - return self.on('changed', *handlers, **kwargs) + return self.on("changed", *handlers, **kwargs) class AlwaysInvalidParameter(ValidatableParameter[T], Generic[T]): diff --git a/pros/common/ui/interactive/renderers/MachineOutputRenderer.py b/pros/common/ui/interactive/renderers/MachineOutputRenderer.py index 4bb5eddb..5b348617 100644 --- a/pros/common/ui/interactive/renderers/MachineOutputRenderer.py +++ b/pros/common/ui/interactive/renderers/MachineOutputRenderer.py @@ -6,35 +6,36 @@ from pros.common import ui from pros.common.ui.interactive.observable import Observable -from .Renderer import Renderer + from ..application import Application +from .Renderer import Renderer -current: List['MachineOutputRenderer'] = [] +current: List["MachineOutputRenderer"] = [] -def _push_renderer(renderer: 'MachineOutputRenderer'): +def _push_renderer(renderer: "MachineOutputRenderer"): global current - stack: List['MachineOutputRenderer'] = current + stack: List["MachineOutputRenderer"] = current stack.append(renderer) -def _remove_renderer(renderer: 'MachineOutputRenderer'): +def _remove_renderer(renderer: "MachineOutputRenderer"): global current - stack: List['MachineOutputRenderer'] = current + stack: List["MachineOutputRenderer"] = current if renderer in stack: stack.remove(renderer) -def _current_renderer() -> Optional['MachineOutputRenderer']: +def _current_renderer() -> Optional["MachineOutputRenderer"]: global current - stack: List['MachineOutputRenderer'] = current + stack: List["MachineOutputRenderer"] = current return stack[-1] if len(stack) > 0 else None -P = TypeVar('P') +P = TypeVar("P") class MachineOutputRenderer(Renderer[P], Generic[P]): @@ -54,7 +55,7 @@ def on_redraw(): @staticmethod def get_line(): - line = click.get_text_stream('stdin').readline().strip() + line = click.get_text_stream("stdin").readline().strip() return line.strip() if line is not None else None def run(self) -> P: @@ -72,8 +73,8 @@ def run(self) -> P: try: value = json.loads(line) - if 'uuid' in value and 'event' in value: - Observable.notify(value['uuid'], value['event'], *value.get('args', []), **value.get('kwargs', {})) + if "uuid" in value and "event" in value: + Observable.notify(value["uuid"], value["event"], 
*value.get("args", []), **value.get("kwargs", {})) except json.JSONDecodeError as e: ui.logger(__name__).exception(e) except BaseException as e: @@ -84,19 +85,16 @@ def run(self) -> P: return self.run_rv def stop(self): - ui.logger(__name__).debug(f'Stopping {self.app}') + ui.logger(__name__).debug(f"Stopping {self.app}") self.alive = False if current_thread() != self.thread: - ui.logger(__name__).debug(f'Interrupting render thread of {self.app}') + ui.logger(__name__).debug(f"Interrupting render thread of {self.app}") while not self.stop_sem.acquire(timeout=0.1): self.wake_me() - ui.logger(__name__).debug(f'Broadcasting stop {self.app}') - self._output({ - 'uuid': self.app.uuid, - 'should_exit': True - }) + ui.logger(__name__).debug(f"Broadcasting stop {self.app}") + self._output({"uuid": self.app.uuid, "should_exit": True}) _remove_renderer(self) top_renderer = _current_renderer() @@ -107,15 +105,15 @@ def wake_me(self): """ Hack to wake up input thread to know to shut down """ - ui.logger(__name__).debug(f'Broadcasting WAKEME for {self.app}') + ui.logger(__name__).debug(f"Broadcasting WAKEME for {self.app}") if ui.ismachineoutput(): - ui._machineoutput({'type': 'wakeme'}) + ui._machineoutput({"type": "wakeme"}) else: - ui.echo('Wake up the renderer!') + ui.echo("Wake up the renderer!") @staticmethod def _output(data: dict): - data['type'] = 'input/interactive' + data["type"] = "input/interactive" if ui.ismachineoutput(): ui._machineoutput(data) else: diff --git a/pros/common/ui/interactive/renderers/Renderer.py b/pros/common/ui/interactive/renderers/Renderer.py index 40f17a0e..2bbebf2a 100644 --- a/pros/common/ui/interactive/renderers/Renderer.py +++ b/pros/common/ui/interactive/renderers/Renderer.py @@ -2,7 +2,7 @@ from ..application import Application -P = TypeVar('P') +P = TypeVar("P") class Renderer(Generic[P]): diff --git a/pros/common/ui/log.py b/pros/common/ui/log.py index 8202ef95..05ec8a85 100644 --- a/pros/common/ui/log.py +++ b/pros/common/ui/log.py @@ -21,18 +21,18 @@ def __init__(self, *args, ctx_obj=None, **kwargs): def emit(self, record): try: - if self.ctx_obj.get('machine_output', False): + if self.ctx_obj.get("machine_output", False): formatter = self.formatter or logging.Formatter() record.message = record.getMessage() obj = { - 'type': 'log/message', - 'level': record.levelname, - 'message': formatter.formatMessage(record), - 'simpleMessage': record.message + "type": "log/message", + "level": record.levelname, + "message": formatter.formatMessage(record), + "simpleMessage": record.message, } if record.exc_info: - obj['trace'] = formatter.formatException(record.exc_info) - msg = f'Uc&42BWAaQ{jsonpickle.dumps(obj, unpicklable=False, backend=_machine_pickler)}' + obj["trace"] = formatter.formatException(record.exc_info) + msg = f"Uc&42BWAaQ{jsonpickle.dumps(obj, unpicklable=False, backend=_machine_pickler)}" else: msg = self.format(record) click.echo(msg) @@ -47,6 +47,6 @@ class PROSLogFormatter(logging.Formatter): def formatException(self, ei): if not isdebug(): - return '\n'.join(super().formatException(ei).split('\n')[-3:]) + return "\n".join(super().formatException(ei).split("\n")[-3:]) else: return super().formatException(ei) diff --git a/pros/common/utils.py b/pros/common/utils.py index 294da89f..2c771846 100644 --- a/pros/common/utils.py +++ b/pros/common/utils.py @@ -13,14 +13,15 @@ @lru_cache(1) def get_version(): try: - ver = open(os.path.join(os.path.dirname(__file__), '..', '..', 'version')).read().strip() + ver = open(os.path.join(os.path.dirname(__file__), 
"..", "..", "version")).read().strip() if ver is not None: return ver except: pass try: - if getattr(sys, 'frozen', False): + if getattr(sys, "frozen", False): import _constants + ver = _constants.CLI_VERSION if ver is not None: return ver @@ -32,15 +33,16 @@ def get_version(): pass else: import pros.cli.main + module = pros.cli.main.__name__ for dist in pkg_resources.working_set: - scripts = dist.get_entry_map().get('console_scripts') or {} - for script_name, entry_point in iter(scripts.items()): + scripts = dist.get_entry_map().get("console_scripts") or {} + for _, entry_point in iter(scripts.items()): if entry_point.module_name == module: ver = dist.version if ver is not None: return ver - raise RuntimeError('Could not determine version') + raise RuntimeError("Could not determine version") def retries(func, retry: int = 3): @@ -77,13 +79,13 @@ def ismachineoutput(ctx: click.Context = None) -> bool: if isinstance(ctx, click.Context): ctx.ensure_object(dict) assert isinstance(ctx.obj, dict) - return ctx.obj.get('machine_output', False) + return ctx.obj.get("machine_output", False) else: return False def get_pros_dir(): - return click.get_app_dir('PROS') + return click.get_app_dir("PROS") def with_click_context(func): @@ -91,6 +93,7 @@ def with_click_context(func): if not ctx or not isinstance(ctx, click.Context): return func else: + def _wrap(*args, **kwargs): with ctx: try: @@ -109,16 +112,18 @@ def download_file(url: str, ext: Optional[str] = None, desc: Optional[str] = Non :param desc: Description of file being downloaded (for progressbar) :return: The path of the downloaded file, or None if there was an error """ - import requests - from pros.common.ui import progressbar # from rfc6266_parser import parse_requests_response import re + import requests + + from pros.common.ui import progressbar + response = requests.get(url, stream=True) if response.status_code == 200: - filename: str = url.rsplit('/', 1)[-1] - if 'Content-Disposition' in response.headers.keys(): - filename = re.findall("filename=(.+)", response.headers['Content-Disposition'])[0] + filename: str = url.rsplit("/", 1)[-1] + if "Content-Disposition" in response.headers.keys(): + filename = re.findall("filename=(.+)", response.headers["Content-Disposition"])[0] # try: # disposition = parse_requests_response(response) # if isinstance(ext, str): @@ -127,16 +132,17 @@ def download_file(url: str, ext: Optional[str] = None, desc: Optional[str] = Non # filename = disposition.filename_unsafe # except RuntimeError: # pass - output_path = os.path.join(get_pros_dir(), 'download', filename) + output_path = os.path.join(get_pros_dir(), "download", filename) if os.path.exists(output_path): os.remove(output_path) elif not os.path.exists(os.path.dirname(output_path)): os.makedirs(os.path.dirname(output_path), exist_ok=True) - with open(output_path, mode='wb') as file: - with progressbar(length=int(response.headers['Content-Length']), - label=desc or f'Downloading {filename}') as pb: + with open(output_path, mode="wb") as file: + with progressbar( + length=int(response.headers["Content-Length"]), label=desc or f"Downloading {filename}" + ) as pb: for chunk in response.iter_content(256): file.write(chunk) pb.update(len(chunk)) diff --git a/pros/conductor/__init__.py b/pros/conductor/__init__.py index 9d8c0406..e866d1b1 100644 --- a/pros/conductor/__init__.py +++ b/pros/conductor/__init__.py @@ -1,6 +1,6 @@ -__all__ = ['BaseTemplate', 'Template', 'LocalTemplate', 'Depot', 'LocalDepot', 'Project', 'Conductor'] +__all__ = ["BaseTemplate", 
"Template", "LocalTemplate", "Depot", "LocalDepot", "Project", "Conductor"] from .conductor import Conductor from .depots import Depot, LocalDepot from .project import Project -from .templates import BaseTemplate, Template, LocalTemplate +from .templates import BaseTemplate, LocalTemplate, Template diff --git a/pros/conductor/conductor.py b/pros/conductor/conductor.py index fb40d7e1..151e29f8 100644 --- a/pros/conductor/conductor.py +++ b/pros/conductor/conductor.py @@ -1,11 +1,11 @@ import errno import os.path +import re import shutil +import sys from enum import Enum from pathlib import Path -import sys from typing import * -import re import click from semantic_version import Spec, Version @@ -14,14 +14,15 @@ from pros.conductor.project import TemplateAction from pros.conductor.project.template_resolution import InvalidTemplateException from pros.config import Config + from .depots import Depot, HttpDepot from .project import Project from .templates import BaseTemplate, ExternalTemplate, LocalTemplate, Template -MAINLINE_NAME = 'pros-mainline' -MAINLINE_URL = 'https://pros.cs.purdue.edu/v5/_static/releases/pros-mainline.json' -EARLY_ACCESS_NAME = 'kernel-early-access-mainline' -EARLY_ACCESS_URL = 'https://pros.cs.purdue.edu/v5/_static/beta/beta-pros-mainline.json' +MAINLINE_NAME = "pros-mainline" +MAINLINE_URL = "https://pros.cs.purdue.edu/v5/_static/releases/pros-mainline.json" +EARLY_ACCESS_NAME = "kernel-early-access-mainline" +EARLY_ACCESS_URL = "https://pros.cs.purdue.edu/v5/_static/beta/beta-pros-mainline.json" """ # TBD? Currently, EarlyAccess value is stored in config file @@ -30,40 +31,35 @@ class ReleaseChannel(Enum): Beta = 'beta' """ + def is_pathname_valid(pathname: str) -> bool: - ''' + """ A more detailed check for path validity than regex. 
https://stackoverflow.com/a/34102855/11177720 - ''' + """ try: if not isinstance(pathname, str) or not pathname: return False - + _, pathname = os.path.splitdrive(pathname) - - root_dirname = os.environ.get('HOMEDRIVE', 'C:') \ - if sys.platform == 'win32' else os.path.sep + + root_dirname = os.environ.get("HOMEDRIVE", "C:") if sys.platform == "win32" else os.path.sep assert os.path.isdir(root_dirname) - + root_dirname = root_dirname.rstrip(os.path.sep) + os.path.sep for pathname_part in pathname.split(os.path.sep): try: os.lstat(root_dirname + pathname_part) except OSError as exc: - if hasattr(exc, 'winerror'): - if exc.winerror == 123: # ERROR_INVALID_NAME, python doesn't have this constant + if hasattr(exc, "winerror"): + if exc.winerror == 123: # ERROR_INVALID_NAME, python doesn't have this constant return False elif exc.errno in {errno.ENAMETOOLONG, errno.ERANGE}: return False - + # Check for emojis # https://stackoverflow.com/a/62898106/11177720 - ranges = [ - (ord(u'\U0001F300'), ord(u"\U0001FAF6")), # 127744, 129782 - (126980, 127569), - (169, 174), - (8205, 12953) - ] + ranges = [(ord("\U0001F300"), ord("\U0001FAF6")), (126980, 127569), (169, 174), (8205, 12953)] # 127744, 129782 for a_char in pathname: char_code = ord(a_char) for range_min, range_max in ranges: @@ -74,64 +70,65 @@ def is_pathname_valid(pathname: str) -> bool: else: return True + class Conductor(Config): """ Provides entrances for all conductor-related tasks (fetching, applying, creating new projects) """ + def __init__(self, file=None): if not file: - file = os.path.join(click.get_app_dir('PROS'), 'conductor.pros') + file = os.path.join(click.get_app_dir("PROS"), "conductor.pros") self.local_templates: Set[LocalTemplate] = set() self.early_access_local_templates: Set[LocalTemplate] = set() self.depots: Dict[str, Depot] = {} - self.default_target: str = 'v5' + self.default_target: str = "v5" self.default_libraries: Dict[str, List[str]] = None self.early_access_libraries: Dict[str, List[str]] = None self.use_early_access = False self.warn_early_access = False super(Conductor, self).__init__(file) needs_saving = False - if MAINLINE_NAME not in self.depots or \ - not isinstance(self.depots[MAINLINE_NAME], HttpDepot) or \ - self.depots[MAINLINE_NAME].location != MAINLINE_URL: + if ( + MAINLINE_NAME not in self.depots + or not isinstance(self.depots[MAINLINE_NAME], HttpDepot) + or self.depots[MAINLINE_NAME].location != MAINLINE_URL + ): self.depots[MAINLINE_NAME] = HttpDepot(MAINLINE_NAME, MAINLINE_URL) needs_saving = True # add early access depot as another remote depot - if EARLY_ACCESS_NAME not in self.depots or \ - not isinstance(self.depots[EARLY_ACCESS_NAME], HttpDepot) or \ - self.depots[EARLY_ACCESS_NAME].location != EARLY_ACCESS_URL: + if ( + EARLY_ACCESS_NAME not in self.depots + or not isinstance(self.depots[EARLY_ACCESS_NAME], HttpDepot) + or self.depots[EARLY_ACCESS_NAME].location != EARLY_ACCESS_URL + ): self.depots[EARLY_ACCESS_NAME] = HttpDepot(EARLY_ACCESS_NAME, EARLY_ACCESS_URL) needs_saving = True if self.default_target is None: - self.default_target = 'v5' + self.default_target = "v5" needs_saving = True if self.default_libraries is None: - self.default_libraries = { - 'v5': ['okapilib'], - 'cortex': [] - } + self.default_libraries = {"v5": ["okapilib"], "cortex": []} needs_saving = True - if self.early_access_libraries is None or len(self.early_access_libraries['v5']) != 2: - self.early_access_libraries = { - 'v5': ['liblvgl', 'okapilib'], - 'cortex': [] - } + if self.early_access_libraries is None 
or len(self.early_access_libraries["v5"]) != 2: + self.early_access_libraries = {"v5": ["liblvgl", "okapilib"], "cortex": []} needs_saving = True - if 'v5' not in self.default_libraries: - self.default_libraries['v5'] = [] + if "v5" not in self.default_libraries: + self.default_libraries["v5"] = [] needs_saving = True - if 'cortex' not in self.default_libraries: - self.default_libraries['cortex'] = [] + if "cortex" not in self.default_libraries: + self.default_libraries["cortex"] = [] needs_saving = True - if 'v5' not in self.early_access_libraries: - self.early_access_libraries['v5'] = [] + if "v5" not in self.early_access_libraries: + self.early_access_libraries["v5"] = [] needs_saving = True - if 'cortex' not in self.early_access_libraries: - self.early_access_libraries['cortex'] = [] + if "cortex" not in self.early_access_libraries: + self.early_access_libraries["cortex"] = [] needs_saving = True if needs_saving: self.save() from pros.common.sentry import add_context + add_context(self) def get_depot(self, name: str) -> Optional[Depot]: @@ -142,32 +139,34 @@ def fetch_template(self, depot: Depot, template: BaseTemplate, **kwargs) -> Loca if t.identifier == template.identifier: self.purge_template(t) - if 'destination' in kwargs: # this is deprecated, will work (maybe) but not desirable behavior - destination = kwargs.pop('destination') + if "destination" in kwargs: # this is deprecated, will work (maybe) but not desirable behavior + destination = kwargs.pop("destination") else: - destination = os.path.join(self.directory, 'templates', template.identifier) + destination = os.path.join(self.directory, "templates", template.identifier) if os.path.isdir(destination): shutil.rmtree(destination) template: Template = depot.fetch_template(template, destination, **kwargs) - click.secho(f'Fetched {template.identifier} from {depot.name} depot', dim=True) + click.secho(f"Fetched {template.identifier} from {depot.name} depot", dim=True) local_template = LocalTemplate(orig=template, location=destination) - local_template.metadata['origin'] = depot.name - click.echo(f'Adding {local_template.identifier} to registry...', nl=False) - if depot.name == EARLY_ACCESS_NAME: # check for early access + local_template.metadata["origin"] = depot.name + click.echo(f"Adding {local_template.identifier} to registry...", nl=False) + if depot.name == EARLY_ACCESS_NAME: # check for early access self.early_access_local_templates.add(local_template) else: self.local_templates.add(local_template) self.save() if isinstance(template, ExternalTemplate) and template.directory == destination: template.delete() - click.secho('Done', fg='green') + click.secho("Done", fg="green") return local_template def purge_template(self, template: LocalTemplate): - if template.metadata['origin'] == EARLY_ACCESS_NAME: + if template.metadata["origin"] == EARLY_ACCESS_NAME: if template not in self.early_access_local_templates: - logger(__name__).info(f"{template.identifier} was not in the Conductor's local early access templates cache.") + logger(__name__).info( + f"{template.identifier} was not in the Conductor's local early access templates cache." 
+ ) else: self.early_access_local_templates.remove(template) else: @@ -177,18 +176,24 @@ def purge_template(self, template: LocalTemplate): self.local_templates.remove(template) if os.path.abspath(template.location).startswith( - os.path.abspath(os.path.join(self.directory, 'templates'))) \ - and os.path.isdir(template.location): + os.path.abspath(os.path.join(self.directory, "templates")) + ) and os.path.isdir(template.location): shutil.rmtree(template.location) self.save() - def resolve_templates(self, identifier: Union[str, BaseTemplate], allow_online: bool = True, - allow_offline: bool = True, force_refresh: bool = False, - unique: bool = True, **kwargs) -> List[BaseTemplate]: + def resolve_templates( + self, + identifier: Union[str, BaseTemplate], + allow_online: bool = True, + allow_offline: bool = True, + force_refresh: bool = False, + unique: bool = True, + **kwargs, + ) -> List[BaseTemplate]: results = list() if not unique else set() - kernel_version = kwargs.get('kernel_version', None) - if kwargs.get('early_access', None) is not None: - use_early_access = kwargs.get('early_access', False) + kernel_version = kwargs.get("kernel_version", None) + if kwargs.get("early_access", None) is not None: + use_early_access = kwargs.get("early_access", False) else: use_early_access = self.use_early_access if isinstance(identifier, str): @@ -199,9 +204,15 @@ def resolve_templates(self, identifier: Union[str, BaseTemplate], allow_online: offline_results = list() if use_early_access: - offline_results.extend(filter(lambda t: t.satisfies(query, kernel_version=kernel_version), self.early_access_local_templates)) + offline_results.extend( + filter( + lambda t: t.satisfies(query, kernel_version=kernel_version), self.early_access_local_templates + ) + ) - offline_results.extend(filter(lambda t: t.satisfies(query, kernel_version=kernel_version), self.local_templates)) + offline_results.extend( + filter(lambda t: t.satisfies(query, kernel_version=kernel_version), self.local_templates) + ) if unique: results.update(offline_results) @@ -212,40 +223,42 @@ def resolve_templates(self, identifier: Union[str, BaseTemplate], allow_online: # EarlyAccess depot will only be accessed when the --early-access flag is true if depot.name != EARLY_ACCESS_NAME or (depot.name == EARLY_ACCESS_NAME and use_early_access): remote_templates = depot.get_remote_templates(force_check=force_refresh, **kwargs) - online_results = list(filter(lambda t: t.satisfies(query, kernel_version=kernel_version), - remote_templates)) + online_results = list( + filter(lambda t: t.satisfies(query, kernel_version=kernel_version), remote_templates) + ) if unique: results.update(online_results) else: results.extend(online_results) - logger(__name__).debug('Saving Conductor config after checking for remote updates') + logger(__name__).debug("Saving Conductor config after checking for remote updates") self.save() # Save self since there may have been some updates from the depots if len(results) == 0 and not use_early_access: raise dont_send( - InvalidTemplateException(f'{identifier.name} does not support kernel version {kernel_version}')) - + InvalidTemplateException(f"{identifier.name} does not support kernel version {kernel_version}") + ) + return list(results) def resolve_template(self, identifier: Union[str, BaseTemplate], **kwargs) -> Optional[BaseTemplate]: if isinstance(identifier, str): - kwargs['name'] = identifier + kwargs["name"] = identifier elif isinstance(identifier, BaseTemplate): - kwargs['orig'] = identifier + kwargs["orig"] = 
identifier query = BaseTemplate.create_query(**kwargs) - logger(__name__).info(f'Query: {query}') + logger(__name__).info(f"Query: {query}") logger(__name__).debug(query.__dict__) templates = self.resolve_templates(query, **kwargs) logger(__name__).info(f'Candidates: {", ".join([str(t) for t in templates])}') if not any(templates): return None - query.version = str(Spec(query.version or '>0').select([Version(t.version) for t in templates])) + query.version = str(Spec(query.version or ">0").select([Version(t.version) for t in templates])) v = Version(query.version) - v.prerelease = v.prerelease if len(v.prerelease) else ('',) - v.build = v.build if len(v.build) else ('',) - query.version = f'=={v}' - logger(__name__).info(f'Resolved to {query.identifier}') + v.prerelease = v.prerelease if len(v.prerelease) else ("",) + v.build = v.build if len(v.build) else ("",) + query.version = f"=={v}" + logger(__name__).info(f"Resolved to {query.identifier}") templates = self.resolve_templates(query, **kwargs) if not any(templates): return None @@ -255,11 +268,11 @@ def resolve_template(self, identifier: Union[str, BaseTemplate], **kwargs) -> Op # there's a local template satisfying the query if len(local_templates) > 1: # This should never happen! Conductor state must be invalid - raise Exception(f'Multiple local templates satisfy {query.identifier}!') + raise Exception(f"Multiple local templates satisfy {query.identifier}!") return local_templates[0] # prefer pros-mainline template second - mainline_templates = [t for t in templates if t.metadata['origin'] == 'pros-mainline'] + mainline_templates = [t for t in templates if t.metadata["origin"] == "pros-mainline"] if any(mainline_templates): return mainline_templates[0] @@ -267,140 +280,164 @@ def resolve_template(self, identifier: Union[str, BaseTemplate], **kwargs) -> Op return templates[0] def apply_template(self, project: Project, identifier: Union[str, BaseTemplate], **kwargs): - upgrade_ok = kwargs.get('upgrade_ok', True) - install_ok = kwargs.get('install_ok', True) - downgrade_ok = kwargs.get('downgrade_ok', True) - download_ok = kwargs.get('download_ok', True) - force = kwargs.get('force_apply', False) - - kwargs['target'] = project.target - if 'kernel' in project.templates: + upgrade_ok = kwargs.get("upgrade_ok", True) + install_ok = kwargs.get("install_ok", True) + downgrade_ok = kwargs.get("downgrade_ok", True) + download_ok = kwargs.get("download_ok", True) + force = kwargs.get("force_apply", False) + + kwargs["target"] = project.target + if "kernel" in project.templates: # support_kernels for backwards compatibility, but kernel_version should be getting most of the exposure - kwargs['kernel_version'] = kwargs['supported_kernels'] = project.templates['kernel'].version + kwargs["kernel_version"] = kwargs["supported_kernels"] = project.templates["kernel"].version template = self.resolve_template(identifier=identifier, allow_online=download_ok, **kwargs) if template is None: raise dont_send( - InvalidTemplateException(f'Could not find a template satisfying {identifier} for {project.target}')) + InvalidTemplateException(f"Could not find a template satisfying {identifier} for {project.target}") + ) # warn and prompt user if upgrading to PROS 4 or downgrading to PROS 3 - if template.name == 'kernel': + if template.name == "kernel": isProject = Project.find_project("") if isProject: curr_proj = Project() if curr_proj.kernel: - if template.version[0] == '4' and curr_proj.kernel[0] == '3': - confirm = ui.confirm(f'Warning! 
Upgrading project to PROS 4 will cause breaking changes. ' - f'Do you still want to upgrade?') + if template.version[0] == "4" and curr_proj.kernel[0] == "3": + confirm = ui.confirm( + f"Warning! Upgrading project to PROS 4 will cause breaking changes. " + f"Do you still want to upgrade?" + ) if not confirm: - raise dont_send( - InvalidTemplateException(f'Not upgrading')) - if template.version[0] == '3' and curr_proj.kernel[0] == '4': - confirm = ui.confirm(f'Warning! Downgrading project to PROS 3 will cause breaking changes. ' - f'Do you still want to downgrade?') + raise dont_send(InvalidTemplateException(f"Not upgrading")) + if template.version[0] == "3" and curr_proj.kernel[0] == "4": + confirm = ui.confirm( + f"Warning! Downgrading project to PROS 3 will cause breaking changes. " + f"Do you still want to downgrade?" + ) if not confirm: - raise dont_send( - InvalidTemplateException(f'Not downgrading')) - elif not project.use_early_access and template.version[0] == '3' and not self.warn_early_access: - confirm = ui.confirm(f'PROS 4 is now in early access. ' - f'Please use the --early-access flag if you would like to use it.\n' - f'Do you want to use PROS 4 instead?') + raise dont_send(InvalidTemplateException(f"Not downgrading")) + elif not project.use_early_access and template.version[0] == "3" and not self.warn_early_access: + confirm = ui.confirm( + f"PROS 4 is now in early access. " + f"Please use the --early-access flag if you would like to use it.\n" + f"Do you want to use PROS 4 instead?" + ) self.warn_early_access = True - if confirm: # use pros 4 + if confirm: # use pros 4 project.use_early_access = True project.save() - kwargs['version'] = '>=0' - kwargs['early_access'] = True + kwargs["version"] = ">=0" + kwargs["early_access"] = True # Recall the function with early access enabled return self.apply_template(project, identifier, **kwargs) - + self.save() if not isinstance(template, LocalTemplate): with ui.Notification(): - template = self.fetch_template(self.get_depot(template.metadata['origin']), template, **kwargs) + template = self.fetch_template(self.get_depot(template.metadata["origin"]), template, **kwargs) assert isinstance(template, LocalTemplate) logger(__name__).info(str(project)) valid_action = project.get_template_actions(template) if valid_action == TemplateAction.NotApplicable: raise dont_send( - InvalidTemplateException(f'{template.identifier} is not applicable to {project}', reason=valid_action) + InvalidTemplateException(f"{template.identifier} is not applicable to {project}", reason=valid_action) ) - if force \ - or (valid_action == TemplateAction.Upgradable and upgrade_ok) \ - or (valid_action == TemplateAction.Installable and install_ok) \ - or (valid_action == TemplateAction.Downgradable and downgrade_ok): - project.apply_template(template, force_system=kwargs.pop('force_system', False), - force_user=kwargs.pop('force_user', False), - remove_empty_directories=kwargs.pop('remove_empty_directories', False)) - ui.finalize('apply', f'Finished applying {template.identifier} to {project.location}') + if ( + force + or (valid_action == TemplateAction.Upgradable and upgrade_ok) + or (valid_action == TemplateAction.Installable and install_ok) + or (valid_action == TemplateAction.Downgradable and downgrade_ok) + ): + project.apply_template( + template, + force_system=kwargs.pop("force_system", False), + force_user=kwargs.pop("force_user", False), + remove_empty_directories=kwargs.pop("remove_empty_directories", False), + ) + ui.finalize("apply", f"Finished applying 
{template.identifier} to {project.location}") elif valid_action != TemplateAction.AlreadyInstalled: raise dont_send( - InvalidTemplateException(f'Could not install {template.identifier} because it is {valid_action.name},' - f' and that is not allowed.', reason=valid_action) + InvalidTemplateException( + f"Could not install {template.identifier} because it is {valid_action.name}," + f" and that is not allowed.", + reason=valid_action, + ) ) else: - ui.finalize('apply', f'{template.identifier} is already installed in {project.location}') + ui.finalize("apply", f"{template.identifier} is already installed in {project.location}") @staticmethod - def remove_template(project: Project, identifier: Union[str, BaseTemplate], remove_user: bool = True, - remove_empty_directories: bool = True): - ui.logger(__name__).debug(f'Uninstalling templates matching {identifier}') + def remove_template( + project: Project, + identifier: Union[str, BaseTemplate], + remove_user: bool = True, + remove_empty_directories: bool = True, + ): + ui.logger(__name__).debug(f"Uninstalling templates matching {identifier}") if not project.resolve_template(identifier): ui.echo(f"{identifier} is not an applicable template") for template in project.resolve_template(identifier): - ui.echo(f'Uninstalling {template.identifier}') - project.remove_template(template, remove_user=remove_user, - remove_empty_directories=remove_empty_directories) + ui.echo(f"Uninstalling {template.identifier}") + project.remove_template( + template, remove_user=remove_user, remove_empty_directories=remove_empty_directories + ) def new_project(self, path: str, no_default_libs: bool = False, **kwargs) -> Project: - if kwargs.get('early_access', None) is not None: - use_early_access = kwargs.get('early_access', False) + if kwargs.get("early_access", None) is not None: + use_early_access = kwargs.get("early_access", False) else: use_early_access = self.use_early_access kwargs["early_access"] = use_early_access - if kwargs["version_source"]: # If true, then the user has not specified a version + if kwargs["version_source"]: # If true, then the user has not specified a version if not use_early_access and self.warn_early_access: - ui.echo(f"PROS 4 is now in early access. " - f"If you would like to use it, use the --early-access flag.") + ui.echo(f"PROS 4 is now in early access. " f"If you would like to use it, use the --early-access flag.") elif not use_early_access and not self.warn_early_access: - confirm = ui.confirm(f'PROS 4 is now in early access. ' - f'Please use the --early-access flag if you would like to use it.\n' - f'Do you want to use PROS 4 instead?') + confirm = ui.confirm( + f"PROS 4 is now in early access. " + f"Please use the --early-access flag if you would like to use it.\n" + f"Do you want to use PROS 4 instead?" + ) self.warn_early_access = True if confirm: use_early_access = True - kwargs['early_access'] = True + kwargs["early_access"] = True elif use_early_access: - ui.echo(f'Early access is enabled. Using PROS 4.') + ui.echo(f"Early access is enabled. 
Using PROS 4.") elif use_early_access: - ui.echo(f'Early access is enabled.') + ui.echo(f"Early access is enabled.") if not is_pathname_valid(str(Path(path).absolute())): - raise dont_send(ValueError('Project path contains invalid characters.')) - - if Path(path).exists() and Path(path).samefile(os.path.expanduser('~')): - raise dont_send(ValueError('Will not create a project in user home directory')) - + raise dont_send(ValueError("Project path contains invalid characters.")) + + if Path(path).exists() and Path(path).samefile(os.path.expanduser("~")): + raise dont_send(ValueError("Will not create a project in user home directory")) + proj = Project(path=path, create=True, early_access=use_early_access) - if 'target' in kwargs: - proj.target = kwargs['target'] - if 'project_name' in kwargs and kwargs['project_name'] and not kwargs['project_name'].isspace(): - proj.project_name = kwargs['project_name'] + if "target" in kwargs: + proj.target = kwargs["target"] + if "project_name" in kwargs and kwargs["project_name"] and not kwargs["project_name"].isspace(): + proj.project_name = kwargs["project_name"] else: proj.project_name = os.path.basename(os.path.normpath(os.path.abspath(path))) - if 'version' in kwargs: - if kwargs['version'] == 'latest': - kwargs['version'] = '>=0' - self.apply_template(proj, identifier='kernel', **kwargs) + if "version" in kwargs: + if kwargs["version"] == "latest": + kwargs["version"] = ">=0" + self.apply_template(proj, identifier="kernel", **kwargs) proj.save() if not no_default_libs: - libraries = self.early_access_libraries if proj.use_early_access and (kwargs.get("version", ">").startswith("4") or kwargs.get("version", ">").startswith(">")) else self.default_libraries + libraries = ( + self.early_access_libraries + if proj.use_early_access + and (kwargs.get("version", ">").startswith("4") or kwargs.get("version", ">").startswith(">")) + else self.default_libraries + ) for library in libraries[proj.target]: try: # remove kernel version so that latest template satisfying query is correctly selected - if 'version' in kwargs: - kwargs.pop('version') + if "version" in kwargs: + kwargs.pop("version") self.apply_template(proj, library, **kwargs) except Exception as e: logger(__name__).exception(e) @@ -413,6 +450,6 @@ def add_depot(self, name: str, url: str): def remove_depot(self, name: str): del self.depots[name] self.save() - + def query_depots(self, url: bool): - return [name + ((' -- ' + depot.location) if url else '') for name, depot in self.depots.items()] + return [name + ((" -- " + depot.location) if url else "") for name, depot in self.depots.items()] diff --git a/pros/conductor/depots.md b/pros/conductor/depots.md index 33a92336..f4efcf3f 100644 --- a/pros/conductor/depots.md +++ b/pros/conductor/depots.md @@ -13,7 +13,7 @@ $ pros conduct add-depot test "https://pros.cs.purdue.edu/v5/_static/beta/testin `pros conduct remove-depot ` Example: -```bash +```bash $ pros conduct remove-depot test > Removed depot test ``` @@ -28,11 +28,11 @@ Examples: ```bash $ pros conduct query-depots --url > Available Depots: -> +> > kernel-beta-mainline -- https://raw.githubusercontent.com/purduesigbots/pros-mainline/master/beta/kernel-beta-mainline.json > pros-mainline -- https://purduesigbots.github.io/pros-mainline/pros-mainline.json > test -- https://pros.cs.purdue.edu/v5/_static/beta/testing-mainline.json -> +> ``` ```bash $ pros conduct query-depots @@ -41,5 +41,5 @@ $ pros conduct query-depots > kernel-beta-mainline > pros-mainline > test -> +> ``` diff --git 
a/pros/conductor/depots/depot.py b/pros/conductor/depots/depot.py index 364d312f..33b349f2 100644 --- a/pros/conductor/depots/depot.py +++ b/pros/conductor/depots/depot.py @@ -4,13 +4,19 @@ import pros.common.ui as ui from pros.common import logger from pros.config.cli_config import cli_config + from ..templates import BaseTemplate, Template class Depot(object): - def __init__(self, name: str, location: str, config: Dict[str, Any] = None, - update_frequency: timedelta = timedelta(minutes=1), - config_schema: Dict[str, Dict[str, Any]] = None): + def __init__( + self, + name: str, + location: str, + config: Dict[str, Any] = None, + update_frequency: timedelta = timedelta(minutes=1), + config_schema: Dict[str, Dict[str, Any]] = None, + ): self.name: str = name self.location: str = location self.config: Dict[str, Any] = config or {} @@ -27,14 +33,16 @@ def fetch_template(self, template: BaseTemplate, destination: str, **kwargs) -> def get_remote_templates(self, auto_check_freq: Optional[timedelta] = None, force_check: bool = False, **kwargs): if auto_check_freq is None: - auto_check_freq = getattr(self, 'update_frequency', cli_config().update_frequency) - logger(__name__).info(f'Last check of {self.name} was {self.last_remote_update} ' - f'({datetime.now() - self.last_remote_update} vs {auto_check_freq}).') + auto_check_freq = getattr(self, "update_frequency", cli_config().update_frequency) + logger(__name__).info( + f"Last check of {self.name} was {self.last_remote_update} " + f"({datetime.now() - self.last_remote_update} vs {auto_check_freq})." + ) if force_check or datetime.now() - self.last_remote_update > auto_check_freq: with ui.Notification(): - ui.echo(f'Updating {self.name}... ', nl=False) + ui.echo(f"Updating {self.name}... ", nl=False) self.update_remote_templates(**kwargs) - ui.echo('Done', color='green') + ui.echo("Done", color="green") for t in self.remote_templates: - t.metadata['origin'] = self.name + t.metadata["origin"] = self.name return self.remote_templates diff --git a/pros/conductor/depots/http_depot.py b/pros/conductor/depots/http_depot.py index dc7e3a25..feda9472 100644 --- a/pros/conductor/depots/http_depot.py +++ b/pros/conductor/depots/http_depot.py @@ -7,37 +7,39 @@ import pros.common.ui as ui from pros.common import logger from pros.common.utils import download_file -from .depot import Depot + from ..templates import BaseTemplate, ExternalTemplate +from .depot import Depot class HttpDepot(Depot): def __init__(self, name: str, location: str): # Note: If update_frequency = timedelta(minutes=1) isn't included as a parameter, # the beta depot won't be saved in conductor.json correctly - super().__init__(name, location, config_schema={}, update_frequency = timedelta(minutes=1)) + super().__init__(name, location, config_schema={}, update_frequency=timedelta(minutes=1)) def fetch_template(self, template: BaseTemplate, destination: str, **kwargs): import requests - assert 'location' in template.metadata - url = template.metadata['location'] - tf = download_file(url, ext='zip', desc=f'Downloading {template.identifier}') + + assert "location" in template.metadata + url = template.metadata["location"] + tf = download_file(url, ext="zip", desc=f"Downloading {template.identifier}") if tf is None: - raise requests.ConnectionError(f'Could not obtain {url}') + raise requests.ConnectionError(f"Could not obtain {url}") with zipfile.ZipFile(tf) as zf: - with ui.progressbar(length=len(zf.namelist()), - label=f'Extracting {template.identifier}') as pb: + with 
ui.progressbar(length=len(zf.namelist()), label=f"Extracting {template.identifier}") as pb: for file in zf.namelist(): zf.extract(file, path=destination) pb.update(1) os.remove(tf) - return ExternalTemplate(file=os.path.join(destination, 'template.pros')) + return ExternalTemplate(file=os.path.join(destination, "template.pros")) def update_remote_templates(self, **_): import requests + response = requests.get(self.location) if response.status_code == 200: self.remote_templates = jsonpickle.decode(response.text) else: - logger(__name__).warning(f'Unable to access {self.name} ({self.location}): {response.status_code}') + logger(__name__).warning(f"Unable to access {self.name} ({self.location}): {response.status_code}") self.last_remote_update = datetime.now() diff --git a/pros/conductor/depots/local_depot.py b/pros/conductor/depots/local_depot.py index 60bff121..0dbdb9a6 100644 --- a/pros/conductor/depots/local_depot.py +++ b/pros/conductor/depots/local_depot.py @@ -4,31 +4,31 @@ import click +from pros.common.utils import logger from pros.config import ConfigNotFoundException + +from ..templates import BaseTemplate, ExternalTemplate, Template from .depot import Depot -from ..templates import BaseTemplate, Template, ExternalTemplate -from pros.common.utils import logger class LocalDepot(Depot): def fetch_template(self, template: BaseTemplate, destination: str, **kwargs) -> Template: - if 'location' not in kwargs: + if "location" not in kwargs: logger(__name__).debug(f"Template not specified. Provided arguments: {kwargs}") - raise KeyError('Location of local template must be specified.') - location = kwargs['location'] + raise KeyError("Location of local template must be specified.") + location = kwargs["location"] if os.path.isdir(location): location_dir = location - if not os.path.isfile(os.path.join(location_dir, 'template.pros')): - raise ConfigNotFoundException(f'A template.pros file was not found in {location_dir}.') - template_file = os.path.join(location_dir, 'template.pros') + if not os.path.isfile(os.path.join(location_dir, "template.pros")): + raise ConfigNotFoundException(f"A template.pros file was not found in {location_dir}.") + template_file = os.path.join(location_dir, "template.pros") elif zipfile.is_zipfile(location): with zipfile.ZipFile(location) as zf: - with click.progressbar(length=len(zf.namelist()), - label=f"Extracting {location}") as progress_bar: + with click.progressbar(length=len(zf.namelist()), label=f"Extracting {location}") as progress_bar: for file in zf.namelist(): zf.extract(file, path=destination) progress_bar.update(1) - template_file = os.path.join(destination, 'template.pros') + template_file = os.path.join(destination, "template.pros") location_dir = destination elif os.path.isfile(location): location_dir = os.path.dirname(location) @@ -40,7 +40,8 @@ def fetch_template(self, template: BaseTemplate, destination: str, **kwargs) -> raise ValueError(f"The specified location was not a file or directory ({location}).") if location_dir != destination: n_files = len([os.path.join(dp, f) for dp, dn, fn in os.walk(location_dir) for f in fn]) - with click.progressbar(length=n_files, label='Copying to local cache') as pb: + with click.progressbar(length=n_files, label="Copying to local cache") as pb: + def my_copy(*args): pb.update(1) shutil.copy2(*args) @@ -49,4 +50,4 @@ def my_copy(*args): return ExternalTemplate(file=template_file) def __init__(self): - super().__init__('local', 'local') + super().__init__("local", "local") diff --git 
a/pros/conductor/interactive/NewProjectModal.py b/pros/conductor/interactive/NewProjectModal.py index 9f71c76d..552f0b73 100644 --- a/pros/conductor/interactive/NewProjectModal.py +++ b/pros/conductor/interactive/NewProjectModal.py @@ -6,20 +6,25 @@ from pros.common import ui from pros.common.ui.interactive import application, components, parameters from pros.conductor import Conductor + from .parameters import NonExistentProjectParameter class NewProjectModal(application.Modal[None]): - targets = parameters.OptionParameter('v5', ['v5', 'cortex']) - kernel_versions = parameters.OptionParameter('latest', ['latest']) + targets = parameters.OptionParameter("v5", ["v5", "cortex"]) + kernel_versions = parameters.OptionParameter("latest", ["latest"]) install_default_libraries = parameters.BooleanParameter(True) project_name = parameters.Parameter(None) advanced_collapsed = parameters.BooleanParameter(True) - def __init__(self, ctx: Context = None, conductor: Optional[Conductor] = None, - directory=os.path.join(os.path.expanduser('~'), 'My PROS Project')): - super().__init__('Create a new project') + def __init__( + self, + ctx: Context = None, + conductor: Optional[Conductor] = None, + directory=os.path.join(os.path.expanduser("~"), "My PROS Project"), + ): + super().__init__("Create a new project") self.conductor = conductor or Conductor() self.click_ctx = ctx or get_current_context() self.directory = NonExistentProjectParameter(directory) @@ -28,11 +33,11 @@ def __init__(self, ctx: Context = None, conductor: Optional[Conductor] = None, cb(self.targets) def target_changed(self, new_target): - templates = self.conductor.resolve_templates('kernel', target=new_target.value) + templates = self.conductor.resolve_templates("kernel", target=new_target.value) if len(templates) == 0: - self.kernel_versions.options = ['latest'] + self.kernel_versions.options = ["latest"] else: - self.kernel_versions.options = ['latest'] + sorted({t.version for t in templates}, reverse=True) + self.kernel_versions.options = ["latest"] + sorted({t.version for t in templates}, reverse=True) self.redraw() def confirm(self, *args, **kwargs): @@ -43,15 +48,16 @@ def confirm(self, *args, **kwargs): target=self.targets.value, version=self.kernel_versions.value, no_default_libs=not self.install_default_libraries.value, - project_name=self.project_name.value + project_name=self.project_name.value, ) from pros.conductor.project import ProjectReport + report = ProjectReport(project) - ui.finalize('project-report', report) + ui.finalize("project-report", report) with ui.Notification(): - ui.echo('Building project...') + ui.echo("Building project...") project.compile([]) @property @@ -59,15 +65,15 @@ def can_confirm(self): return self.directory.is_valid() and self.targets.is_valid() and self.kernel_versions.is_valid() def build(self) -> Generator[components.Component, None, None]: - yield components.DirectorySelector('Project Directory', self.directory) - yield components.ButtonGroup('Target', self.targets) + yield components.DirectorySelector("Project Directory", self.directory) + yield components.ButtonGroup("Target", self.targets) project_name_placeholder = os.path.basename(os.path.normpath(os.path.abspath(self.directory.value))) yield components.Container( - components.InputBox('Project Name', self.project_name, placeholder=project_name_placeholder), - components.DropDownBox('Kernel Version', self.kernel_versions), - components.Checkbox('Install default libraries', self.install_default_libraries), - title='Advanced', - 
collapsed=self.advanced_collapsed + components.InputBox("Project Name", self.project_name, placeholder=project_name_placeholder), + components.DropDownBox("Kernel Version", self.kernel_versions), + components.Checkbox("Install default libraries", self.install_default_libraries), + title="Advanced", + collapsed=self.advanced_collapsed, ) diff --git a/pros/conductor/interactive/UpdateProjectModal.py b/pros/conductor/interactive/UpdateProjectModal.py index 9cb5124e..4e3943cd 100644 --- a/pros/conductor/interactive/UpdateProjectModal.py +++ b/pros/conductor/interactive/UpdateProjectModal.py @@ -8,6 +8,7 @@ from pros.common.ui.interactive import application, components, parameters from pros.conductor import BaseTemplate, Conductor, Project from pros.conductor.project.ProjectTransaction import ProjectTransaction + from .components import TemplateListingComponent from .parameters import ExistingProjectParameter, TemplateParameter @@ -25,9 +26,7 @@ def is_processing(self, value: bool): def _generate_transaction(self) -> ProjectTransaction: transaction = ProjectTransaction(self.project, self.conductor) - apply_kwargs = dict( - force_apply=self.force_apply_parameter.value - ) + apply_kwargs = dict(force_apply=self.force_apply_parameter.value) if self.name.value != self.project.name: transaction.change_name(self.name.value) if self.project.template_is_applicable(self.current_kernel.value, **apply_kwargs): @@ -47,22 +46,23 @@ def _add_template(self): ui.logger(__name__).debug(options) p = TemplateParameter(None, options) - @p.on('removed') + @p.on("removed") def remove_template(): self.new_templates.remove(p) self.new_templates.append(p) - def __init__(self, ctx: Optional[Context] = None, conductor: Optional[Conductor] = None, - project: Optional[Project] = None): - super().__init__('Update a project') + def __init__( + self, ctx: Optional[Context] = None, conductor: Optional[Conductor] = None, project: Optional[Project] = None + ): + super().__init__("Update a project") self.conductor = conductor or Conductor() self.click_ctx = ctx or get_current_context() self._is_processing = False self.project: Optional[Project] = project self.project_path = ExistingProjectParameter( - str(project.location) if project else os.path.join(os.path.expanduser('~'), 'My PROS Project') + str(project.location) if project else os.path.join(os.path.expanduser("~"), "My PROS Project") ) self.name = parameters.Parameter(None) @@ -74,7 +74,7 @@ def __init__(self, ctx: Optional[Context] = None, conductor: Optional[Conductor] self.templates_collapsed = parameters.BooleanParameter(False) self.advanced_collapsed = parameters.BooleanParameter(True) - self.add_template_button = components.Button('Add Template') + self.add_template_button = components.Button("Add Template") self.add_template_button.on_clicked(self._add_template) @@ -92,20 +92,22 @@ def project_changed(self, new_project: ExistingProjectParameter): self.current_kernel = TemplateParameter( None, options=sorted( - {t for t in self.conductor.resolve_templates(self.project.templates['kernel'].as_query())}, - key=lambda v: Version(v.version), reverse=True - ) + {t for t in self.conductor.resolve_templates(self.project.templates["kernel"].as_query())}, + key=lambda v: Version(v.version), + reverse=True, + ), ) self.current_templates = [ TemplateParameter( None, - options=sorted({ - t - for t in self.conductor.resolve_templates(t.as_query()) - }, key=lambda v: Version(v.version), reverse=True) + options=sorted( + {t for t in self.conductor.resolve_templates(t.as_query())}, + 
key=lambda v: Version(v.version), + reverse=True, + ), ) for t in self.project.templates.values() - if t.name != 'kernel' + if t.name != "kernel" ] self.new_templates = [] @@ -122,26 +124,28 @@ def can_confirm(self): return self.project and self._generate_transaction().can_execute() def build(self) -> Generator[components.Component, None, None]: - yield components.DirectorySelector('Project Directory', self.project_path) + yield components.DirectorySelector("Project Directory", self.project_path) if self.is_processing: yield components.Spinner() elif self.project_path.is_valid(): assert self.project is not None - yield components.Label(f'Modify your {self.project.target} project.') - yield components.InputBox('Project Name', self.name) + yield components.Label(f"Modify your {self.project.target} project.") + yield components.InputBox("Project Name", self.name) yield TemplateListingComponent(self.current_kernel, editable=dict(version=True), removable=False) yield components.Container( - *(TemplateListingComponent(t, editable=dict(version=True), removable=True) for t in - self.current_templates), + *( + TemplateListingComponent(t, editable=dict(version=True), removable=True) + for t in self.current_templates + ), *(TemplateListingComponent(t, editable=True, removable=True) for t in self.new_templates), self.add_template_button, - title='Templates', - collapsed=self.templates_collapsed + title="Templates", + collapsed=self.templates_collapsed, ) yield components.Container( - components.Checkbox('Re-apply all templates', self.force_apply_parameter), - title='Advanced', - collapsed=self.advanced_collapsed + components.Checkbox("Re-apply all templates", self.force_apply_parameter), + title="Advanced", + collapsed=self.advanced_collapsed, ) yield components.Label('What will happen when you click "Continue":') yield components.VerbatimLabel(self._generate_transaction().describe()) diff --git a/pros/conductor/interactive/__init__.py b/pros/conductor/interactive/__init__.py index 89f1e51c..d915ee91 100644 --- a/pros/conductor/interactive/__init__.py +++ b/pros/conductor/interactive/__init__.py @@ -1,4 +1,3 @@ from .NewProjectModal import NewProjectModal -from .UpdateProjectModal import UpdateProjectModal - from .parameters import ExistingProjectParameter, NonExistentProjectParameter +from .UpdateProjectModal import UpdateProjectModal diff --git a/pros/conductor/interactive/components.py b/pros/conductor/interactive/components.py index b5bfacb7..e44b702a 100644 --- a/pros/conductor/interactive/components.py +++ b/pros/conductor/interactive/components.py @@ -7,28 +7,28 @@ class TemplateListingComponent(components.Container): def _generate_components(self) -> Generator[components.Component, None, None]: - if not self.editable['name'] and not self.editable['version']: + if not self.editable["name"] and not self.editable["version"]: yield components.Label(self.template.value.identifier) else: - if self.editable['name']: - yield components.InputBox('Name', self.template.name) + if self.editable["name"]: + yield components.InputBox("Name", self.template.name) else: yield components.Label(self.template.value.name) - if self.editable['version']: + if self.editable["version"]: if isinstance(self.template.version, parameters.OptionParameter): - yield components.DropDownBox('Version', self.template.version) + yield components.DropDownBox("Version", self.template.version) else: - yield components.InputBox('Version', self.template.version) + yield components.InputBox("Version", self.template.version) else: yield 
components.Label(self.template.value.version) if self.removable: - remove_button = components.Button('Don\'t remove' if self.template.removed else 'Remove') - remove_button.on_clicked(lambda: self.template.trigger('removed')) + remove_button = components.Button("Don't remove" if self.template.removed else "Remove") + remove_button.on_clicked(lambda: self.template.trigger("removed")) yield remove_button - def __init__(self, template: TemplateParameter, - removable: bool = False, - editable: Union[Dict[str, bool], bool] = True): + def __init__( + self, template: TemplateParameter, removable: bool = False, editable: Union[Dict[str, bool], bool] = True + ): self.template = template self.removable = removable if isinstance(editable, bool): diff --git a/pros/conductor/interactive/parameters.py b/pros/conductor/interactive/parameters.py index 7b0da738..486a0719 100644 --- a/pros/conductor/interactive/parameters.py +++ b/pros/conductor/interactive/parameters.py @@ -13,22 +13,24 @@ class NonExistentProjectParameter(p.ValidatableParameter[str]): def validate(self, value: str) -> Union[bool, str]: value = os.path.abspath(value) if os.path.isfile(value): - return 'Path is a file' + return "Path is a file" if os.path.isdir(value) and not os.access(value, os.W_OK): - return 'Do not have write permission to path' + return "Do not have write permission to path" if Project.find_project(value) is not None: - return 'Project path already exists, delete it first' + return "Project path already exists, delete it first" blacklisted_directories = [] # TODO: Proper Windows support - if sys.platform == 'win32': - blacklisted_directories.extend([ - os.environ.get('WINDIR', os.path.join('C:', 'Windows')), - os.environ.get('PROGRAMFILES', os.path.join('C:', 'Program Files')) - ]) + if sys.platform == "win32": + blacklisted_directories.extend( + [ + os.environ.get("WINDIR", os.path.join("C:", "Windows")), + os.environ.get("PROGRAMFILES", os.path.join("C:", "Program Files")), + ] + ) if any(value.startswith(d) for d in blacklisted_directories): - return 'Cannot create project in a system directory' - if Path(value).exists() and Path(value).samefile(os.path.expanduser('~')): - return 'Should not create a project in home directory' + return "Cannot create project in a system directory" + if Path(value).exists() and Path(value).samefile(os.path.expanduser("~")): + return "Should not create a project in home directory" if not os.path.exists(value): parent = os.path.split(value)[0] while parent and not os.path.exists(parent): @@ -37,13 +39,13 @@ def validate(self, value: str) -> Union[bool, str]: break parent = temp_value if not parent: - return 'Cannot create directory because root does not exist' + return "Cannot create directory because root does not exist" if not os.path.exists(parent): - return f'Cannot create directory because {parent} does not exist' + return f"Cannot create directory because {parent} does not exist" if not os.path.isdir(parent): - return f'Cannot create directory because {parent} is a file' + return f"Cannot create directory because {parent} is a file" if not os.access(parent, os.W_OK | os.X_OK): - return f'Cannot create directory because missing write permissions to {parent}' + return f"Cannot create directory because missing write permissions to {parent}" return True @@ -56,7 +58,7 @@ def update(self, new_value): def validate(self, value: str): project = Project.find_project(value) - return project is not None or 'Path is not inside a PROS project' + return project is not None or "Path is not 
inside a PROS project" class TemplateParameter(p.ValidatableParameter[BaseTemplate]): @@ -64,36 +66,36 @@ def _update_versions(self): if self.name.value in self.options: self.version = p.OptionParameter( self.version.value if self.version else None, - list(sorted(self.options[self.name.value].keys(), reverse=True, key=lambda v: Version(v))) + list(sorted(self.options[self.name.value].keys(), reverse=True, key=lambda v: Version(v))), ) if self.version.value not in self.version.options: self.version.value = self.version.options[0] self.value = self.options[self.name.value][self.version.value] - self.trigger('changed_validated', self) + self.trigger("changed_validated", self) else: self.version = p.AlwaysInvalidParameter(self.value.version) def __init__(self, template: Optional[BaseTemplate], options: List[BaseTemplate], allow_invalid_input: bool = True): if not template and len(options) == 0: - raise ValueError('At least template or versions must be defined for a TemplateParameter') + raise ValueError("At least template or versions must be defined for a TemplateParameter") self.options = {t.name: {_t.version: _t for _t in options if t.name == _t.name} for t in options} if not template: first_template = list(self.options.values())[0] - template = first_template[str(Spec('>0').select([Version(v) for v in first_template.keys()]))] + template = first_template[str(Spec(">0").select([Version(v) for v in first_template.keys()]))] super().__init__(template, allow_invalid_input) self.name: p.ValidatableParameter[str] = p.ValidatableParameter( self.value.name, allow_invalid_input, - validate=lambda v: True if v in self.options.keys() else f'Could not find a template named {v}' + validate=lambda v: True if v in self.options.keys() else f"Could not find a template named {v}", ) if not self.value.version and self.value.name in self.options: - self.value.version = Spec('>0').select([Version(v) for v in self.options[self.value.name].keys()]) + self.value.version = Spec(">0").select([Version(v) for v in self.options[self.value.name].keys()]) self.version = None self._update_versions() @@ -101,26 +103,27 @@ def __init__(self, template: Optional[BaseTemplate], options: List[BaseTemplate] @self.name.on_any_changed def name_any_changed(v: p.ValidatableParameter): self._update_versions() - self.trigger('changed', self) + self.trigger("changed", self) @self.version.on_any_changed def version_any_changed(v: p.ValidatableParameter): if v.value in self.options[self.name.value].keys(): self.value = self.options[self.name.value][v.value] - self.trigger('changed_validated', self) + self.trigger("changed_validated", self) else: self.value.version = v.value - self.trigger('changed', self) + self.trigger("changed", self) # self.name.on_changed(lambda v: self.trigger('changed_validated', self)) # self.version.on_changed(lambda v: self.trigger('changed_validated', self)) self.removed = False - @self.on('removed') + @self.on("removed") def removed_changed(): self.removed = not self.removed def is_valid(self, value: BaseTemplate = None): - return self.name.is_valid(value.name if value else None) and \ - self.version.is_valid(value.version if value else None) + return self.name.is_valid(value.name if value else None) and self.version.is_valid( + value.version if value else None + ) diff --git a/pros/conductor/project/ProjectReport.py b/pros/conductor/project/ProjectReport.py index 75d2ff3a..683964d9 100644 --- a/pros/conductor/project/ProjectReport.py +++ b/pros/conductor/project/ProjectReport.py @@ -2,20 +2,25 @@ class 
ProjectReport(object): - def __init__(self, project: 'Project'): + def __init__(self, project: "Project"): self.project = { "target": project.target, "location": os.path.abspath(project.location), "name": project.name, - "templates": [{"name": t.name, "version": t.version, "origin": t.origin} for t in - project.templates.values()] + "templates": [ + {"name": t.name, "version": t.version, "origin": t.origin} for t in project.templates.values() + ], } def __str__(self): import tabulate - s = f'PROS Project for {self.project["target"]} at: {self.project["location"]}' \ - f' ({self.project["name"]})' if self.project["name"] else '' - s += '\n' + + s = ( + f'PROS Project for {self.project["target"]} at: {self.project["location"]}' f' ({self.project["name"]})' + if self.project["name"] + else "" + ) + s += "\n" rows = [t.values() for t in self.project["templates"]] headers = [h.capitalize() for h in self.project["templates"][0].keys()] s += tabulate.tabulate(rows, headers=headers) diff --git a/pros/conductor/project/ProjectTransaction.py b/pros/conductor/project/ProjectTransaction.py index 14034d42..973cedd5 100644 --- a/pros/conductor/project/ProjectTransaction.py +++ b/pros/conductor/project/ProjectTransaction.py @@ -22,8 +22,9 @@ def can_execute(self, conductor: c.Conductor, project: c.Project) -> bool: class ApplyTemplateAction(Action): - def __init__(self, template: c.BaseTemplate, apply_kwargs: Dict[str, Any] = None, - suppress_already_installed: bool = False): + def __init__( + self, template: c.BaseTemplate, apply_kwargs: Dict[str, Any] = None, suppress_already_installed: bool = False + ): self.template = template self.apply_kwargs = apply_kwargs or {} self.suppress_already_installed = suppress_already_installed @@ -36,38 +37,42 @@ def execute(self, conductor: c.Conductor, project: c.Project): raise e else: ui.logger(__name__).warning(str(e)) - return None def describe(self, conductor: c.Conductor, project: c.Project): action = project.get_template_actions(conductor.resolve_template(self.template)) if action == TemplateAction.NotApplicable: - return f'{self.template.identifier} cannot be applied to project!' + return f"{self.template.identifier} cannot be applied to project!" if action == TemplateAction.Installable: - return f'{self.template.identifier} will installed to project.' + return f"{self.template.identifier} will installed to project." if action == TemplateAction.Downgradable: - return f'Project will be downgraded to {self.template.identifier} from' \ - f' {project.templates[self.template.name].version}.' + return ( + f"Project will be downgraded to {self.template.identifier} from" + f" {project.templates[self.template.name].version}." + ) if action == TemplateAction.Upgradable: - return f'Project will be upgraded to {self.template.identifier} from' \ - f' {project.templates[self.template.name].version}.' + return ( + f"Project will be upgraded to {self.template.identifier} from" + f" {project.templates[self.template.name].version}." + ) if action == TemplateAction.AlreadyInstalled: - if self.apply_kwargs.get('force_apply'): - return f'{self.template.identifier} will be re-applied.' + if self.apply_kwargs.get("force_apply"): + return f"{self.template.identifier} will be re-applied." elif self.suppress_already_installed: - return f'{self.template.identifier} will not be re-applied.' + return f"{self.template.identifier} will not be re-applied." else: - return f'{self.template.identifier} cannot be applied to project because it is already installed.' 
+ return f"{self.template.identifier} cannot be applied to project because it is already installed." def can_execute(self, conductor: c.Conductor, project: c.Project) -> bool: action = project.get_template_actions(conductor.resolve_template(self.template)) if action == TemplateAction.AlreadyInstalled: - return self.apply_kwargs.get('force_apply') or self.suppress_already_installed + return self.apply_kwargs.get("force_apply") or self.suppress_already_installed return action in [TemplateAction.Installable, TemplateAction.Downgradable, TemplateAction.Upgradable] class RemoveTemplateAction(Action): - def __init__(self, template: c.BaseTemplate, remove_kwargs: Dict[str, Any] = None, - suppress_not_removable: bool = False): + def __init__( + self, template: c.BaseTemplate, remove_kwargs: Dict[str, Any] = None, suppress_not_removable: bool = False + ): self.template = template self.remove_kwargs = remove_kwargs or {} self.suppress_not_removable = suppress_not_removable @@ -82,7 +87,7 @@ def execute(self, conductor: c.Conductor, project: c.Project): ui.logger(__name__).warning(str(e)) def describe(self, conductor: c.Conductor, project: c.Project) -> str: - return f'{self.template.identifier} will be removed' + return f"{self.template.identifier} will be removed" def can_execute(self, conductor: c.Conductor, project: c.Project): return True @@ -114,15 +119,15 @@ def add_action(self, action: Action) -> None: def execute(self): if len(self.actions) == 0: - ui.logger(__name__).warning('No actions necessary.') + ui.logger(__name__).warning("No actions necessary.") return location = self.project.location - tfd, tfn = tempfile.mkstemp(prefix='pros-project-', suffix=f'-{self.project.name}.zip', text='w+b') - with os.fdopen(tfd, 'w+b') as tf: - with zipfile.ZipFile(tf, mode='w') as zf: - files, length = it.tee(location.glob('**/*'), 2) + tfd, tfn = tempfile.mkstemp(prefix="pros-project-", suffix=f"-{self.project.name}.zip", text="w+b") + with os.fdopen(tfd, "w+b") as tf: + with zipfile.ZipFile(tf, mode="w") as zf: + files, length = it.tee(location.glob("**/*"), 2) length = len(list(length)) - with ui.progressbar(files, length=length, label=f'Backing up {self.project.name} to {tfn}') as pb: + with ui.progressbar(files, length=length, label=f"Backing up {self.project.name} to {tfn}") as pb: for file in pb: zf.write(file, arcname=file.relative_to(location)) @@ -131,21 +136,21 @@ def execute(self): for action in self.actions: ui.logger(__name__).debug(action.describe(self.conductor, self.project)) rv = action.execute(self.conductor, self.project) - ui.logger(__name__).debug(f'{action} returned {rv}') + ui.logger(__name__).debug(f"{action} returned {rv}") if rv is not None and not rv: - raise ValueError('Action did not complete successfully') - ui.echo('All actions performed successfully') + raise ValueError("Action did not complete successfully") + ui.echo("All actions performed successfully") except Exception as e: - ui.logger(__name__).warning(f'Failed to perform transaction, restoring project to previous state') + ui.logger(__name__).warning(f"Failed to perform transaction, restoring project to previous state") with zipfile.ZipFile(tfn) as zf: - with ui.progressbar(zf.namelist(), label=f'Restoring {self.project.name} from {tfn}') as pb: + with ui.progressbar(zf.namelist(), label=f"Restoring {self.project.name} from {tfn}") as pb: for file in pb: zf.extract(file, path=location) ui.logger(__name__).exception(e) finally: - ui.echo(f'Removing {tfn}') + ui.echo(f"Removing {tfn}") os.remove(tfn) def 
apply_template(self, template: c.BaseTemplate, suppress_already_installed: bool = False, **kwargs): @@ -163,12 +168,9 @@ def change_name(self, new_name: str): def describe(self) -> str: if len(self.actions) > 0: - return '\n'.join( - f'- {a.describe(self.conductor, self.project)}' - for a in self.actions - ) + return "\n".join(f"- {a.describe(self.conductor, self.project)}" for a in self.actions) else: - return 'No actions necessary.' + return "No actions necessary." def can_execute(self) -> bool: return all(a.can_execute(self.conductor, self.project) for a in self.actions) diff --git a/pros/conductor/project/__init__.py b/pros/conductor/project/__init__.py index c71fcd1f..773e1c60 100644 --- a/pros/conductor/project/__init__.py +++ b/pros/conductor/project/__init__.py @@ -10,13 +10,21 @@ from pros.common.ui import EchoPipe from pros.conductor.project.template_resolution import TemplateAction from pros.config.config import Config, ConfigNotFoundException -from .ProjectReport import ProjectReport + from ..templates import BaseTemplate, LocalTemplate, Template from ..transaction import Transaction +from .ProjectReport import ProjectReport class Project(Config): - def __init__(self, path: str = '.', create: bool = False, raise_on_error: bool = True, defaults: dict = None, early_access: bool = False): + def __init__( + self, + path: str = ".", + create: bool = False, + raise_on_error: bool = True, + defaults: dict = None, + early_access: bool = False, + ): """ Instantiates a PROS project configuration :param path: A path to the project, may be the actual project.pros file, any child directory of the project, @@ -26,25 +34,29 @@ def __init__(self, path: str = '.', create: bool = False, raise_on_error: bool = :param raise_on_error: :param defaults: """ - file = Project.find_project(path or '.') + file = Project.find_project(path or ".") if file is None and create: - file = os.path.join(path, 'project.pros') if not os.path.basename(path) == 'project.pros' else path + file = os.path.join(path, "project.pros") if not os.path.basename(path) == "project.pros" else path elif file is None and raise_on_error: - raise ConfigNotFoundException('A project config was not found for {}'.format(path)) + raise ConfigNotFoundException("A project config was not found for {}".format(path)) if defaults is None: defaults = {} - self.target: str = defaults.get('target', 'cortex').lower() # VEX Hardware target (V5/Cortex) - self.templates: Dict[str, Template] = defaults.get('templates', {}) - self.upload_options: Dict = defaults.get('upload_options', {}) - self.project_name: str = defaults.get('project_name', None) + self.target: str = defaults.get("target", "cortex").lower() # VEX Hardware target (V5/Cortex) + self.templates: Dict[str, Template] = defaults.get("templates", {}) + self.upload_options: Dict = defaults.get("upload_options", {}) + self.project_name: str = defaults.get("project_name", None) self.use_early_access = early_access super(Project, self).__init__(file, error_on_decode=raise_on_error) - if 'kernel' in self.__dict__: + if "kernel" in self.__dict__: # Add backwards compatibility with PROS CLI 2 projects by adding kernel as a pseudo-template - self.templates['kernel'] = Template(user_files=self.all_files, name='kernel', - version=self.__dict__['kernel'], target=self.target, - output='bin/output.bin') + self.templates["kernel"] = Template( + user_files=self.all_files, + name="kernel", + version=self.__dict__["kernel"], + target=self.target, + output="bin/output.bin", + ) @property def location(self) -> 
pathlib.Path: @@ -56,21 +68,24 @@ def path(self): @property def name(self): - return self.project_name or os.path.basename(self.location) \ - or os.path.basename(self.templates['kernel'].metadata['output']) \ - or 'pros' + return ( + self.project_name + or os.path.basename(self.location) + or os.path.basename(self.templates["kernel"].metadata["output"]) + or "pros" + ) @property def all_files(self) -> Set[str]: - return {os.path.relpath(p, self.location) for p in - glob.glob(f'{self.location}/**/*', recursive=True)} + return {os.path.relpath(p, self.location) for p in glob.glob(f"{self.location}/**/*", recursive=True)} def get_template_actions(self, template: BaseTemplate) -> TemplateAction: ui.logger(__name__).debug(template) if template.target != self.target: return TemplateAction.NotApplicable from semantic_version import Spec, Version - if template.name != 'kernel' and Version(self.kernel) not in Spec(template.supported_kernels or '>0'): + + if template.name != "kernel" and Version(self.kernel) not in Spec(template.supported_kernels or ">0"): if template.name in self.templates.keys(): return TemplateAction.AlreadyInstalled return TemplateAction.NotApplicable @@ -98,10 +113,16 @@ def template_is_upgradeable(self, query: BaseTemplate) -> bool: def template_is_applicable(self, query: BaseTemplate, force_apply: bool = False) -> bool: ui.logger(__name__).debug(query.target) return self.get_template_actions(query) in ( - TemplateAction.ForcedApplicable if force_apply else TemplateAction.UnforcedApplicable) - - def apply_template(self, template: LocalTemplate, force_system: bool = False, force_user: bool = False, - remove_empty_directories: bool = False): + TemplateAction.ForcedApplicable if force_apply else TemplateAction.UnforcedApplicable + ) + + def apply_template( + self, + template: LocalTemplate, + force_system: bool = False, + force_user: bool = False, + remove_empty_directories: bool = False, + ): """ Applies a template to a project :param remove_empty_directories: @@ -115,7 +136,7 @@ def apply_template(self, template: LocalTemplate, force_system: bool = False, fo installed_user_files = set() for lib_name, lib in self.templates.items(): if lib_name == template.name or lib.name == template.name: - logger(__name__).debug(f'{lib} is already installed') + logger(__name__).debug(f"{lib} is already installed") logger(__name__).debug(lib.system_files) logger(__name__).debug(lib.user_files) transaction.extend_rm(lib.system_files) @@ -126,12 +147,16 @@ def apply_template(self, template: LocalTemplate, force_system: bool = False, fo # remove newly deprecated user files deprecated_user_files = installed_user_files.intersection(self.all_files) - set(template.user_files) if any(deprecated_user_files): - if force_user or confirm(f'The following user files have been deprecated: {deprecated_user_files}. ' - f'Do you want to update them?'): + if force_user or confirm( + f"The following user files have been deprecated: {deprecated_user_files}. " + f"Do you want to update them?" + ): transaction.extend_rm(deprecated_user_files) else: - logger(__name__).warning(f'Deprecated user files may cause weird quirks. See migration guidelines from ' - f'{template.identifier}\'s release notes.') + logger(__name__).warning( + f"Deprecated user files may cause weird quirks. See migration guidelines from " + f"{template.identifier}'s release notes." + ) # Carry forward deprecated user files into the template about to be applied so that user gets warned in # future. 
template.user_files.extend(deprecated_user_files) @@ -144,8 +169,13 @@ def new_user_filter(new_file: str) -> bool: src/opcontrol.c and src/opcontrol.cpp are friends because they have the same stem src/opcontrol.c and include/opcontrol.h are not because they are in different directories """ - return not any([(os.path.normpath(file) in transaction.effective_state) for file in template.user_files if - os.path.splitext(file)[0] == os.path.splitext(new_file)[0]]) + return not any( + [ + (os.path.normpath(file) in transaction.effective_state) + for file in template.user_files + if os.path.splitext(file)[0] == os.path.splitext(new_file)[0] + ] + ) if force_user: new_user_files = template.real_user_files @@ -154,20 +184,23 @@ def new_user_filter(new_file: str) -> bool: transaction.extend_add(new_user_files, template.location) if any([file in transaction.effective_state for file in template.system_files]) and not force_system: - confirm(f'Some required files for {template.identifier} already exist in the project. ' - f'Overwrite the existing files?', abort=True) + confirm( + f"Some required files for {template.identifier} already exist in the project. " + f"Overwrite the existing files?", + abort=True, + ) transaction.extend_add(template.system_files, template.location) logger(__name__).debug(transaction) - transaction.commit(label=f'Applying {template.identifier}', remove_empty_directories=remove_empty_directories) + transaction.commit(label=f"Applying {template.identifier}", remove_empty_directories=remove_empty_directories) self.templates[template.name] = template self.save() def remove_template(self, template: Template, remove_user: bool = False, remove_empty_directories: bool = True): if not self.template_is_installed(template): - raise ValueError(f'{template.identifier} is not installed on this project.') - if template.name == 'kernel': - raise ValueError(f'Cannot remove the kernel template. Maybe create a new project?') + raise ValueError(f"{template.identifier} is not installed on this project.") + if template.name == "kernel": + raise ValueError(f"Cannot remove the kernel template. 
Maybe create a new project?") real_template = LocalTemplate(orig=template, location=self.location) transaction = Transaction(self.location, set(self.all_files)) @@ -175,8 +208,9 @@ def remove_template(self, template: Template, remove_user: bool = False, remove_ if remove_user: transaction.extend_rm(real_template.real_user_files) logger(__name__).debug(transaction) - transaction.commit(label=f'Removing {template.identifier}...', - remove_empty_directories=remove_empty_directories) + transaction.commit( + label=f"Removing {template.identifier}...", remove_empty_directories=remove_empty_directories + ) del self.templates[real_template.name] self.save() @@ -196,47 +230,60 @@ def resolve_template(self, query: Union[str, BaseTemplate]) -> List[Template]: return [local_template for local_template in self.templates.values() if local_template.satisfies(query)] def __str__(self): - return f'Project: {self.location} ({self.name}) for {self.target} with ' \ + return ( + f"Project: {self.location} ({self.name}) for {self.target} with " f'{", ".join([str(t) for t in self.templates.values()])}' + ) @property def kernel(self): - if 'kernel' in self.templates: - return self.templates['kernel'].version - elif hasattr(self.__dict__, 'kernel'): - return self.__dict__['kernel'] - return '' + if "kernel" in self.templates: + return self.templates["kernel"].version + elif hasattr(self.__dict__, "kernel"): + return self.__dict__["kernel"] + return "" @property def output(self): - if 'kernel' in self.templates: - return self.templates['kernel'].metadata['output'] - elif hasattr(self.__dict__, 'output'): - return self.__dict__['output'] - return 'bin/output.bin' + if "kernel" in self.templates: + return self.templates["kernel"].metadata["output"] + elif hasattr(self.__dict__, "output"): + return self.__dict__["output"] + return "bin/output.bin" def make(self, build_args: List[str]): import subprocess + env = os.environ.copy() # Add PROS toolchain to the beginning of PATH to ensure PROS binaries are preferred - if os.environ.get('PROS_TOOLCHAIN'): - env['PATH'] = os.path.join(os.environ.get('PROS_TOOLCHAIN'), 'bin') + os.pathsep + env['PATH'] + if os.environ.get("PROS_TOOLCHAIN"): + env["PATH"] = os.path.join(os.environ.get("PROS_TOOLCHAIN"), "bin") + os.pathsep + env["PATH"] # call make.exe if on Windows - if os.name == 'nt' and os.environ.get('PROS_TOOLCHAIN'): - make_cmd = os.path.join(os.environ.get('PROS_TOOLCHAIN'), 'bin', 'make.exe') + if os.name == "nt" and os.environ.get("PROS_TOOLCHAIN"): + make_cmd = os.path.join(os.environ.get("PROS_TOOLCHAIN"), "bin", "make.exe") else: - make_cmd = 'make' + make_cmd = "make" stdout_pipe = EchoPipe() stderr_pipe = EchoPipe(err=True) - process=None + process = None try: - process = subprocess.Popen(executable=make_cmd, args=[make_cmd, *build_args], cwd=self.directory, env=env, - stdout=stdout_pipe, stderr=stderr_pipe) + process = subprocess.Popen( + executable=make_cmd, + args=[make_cmd, *build_args], + cwd=self.directory, + env=env, + stdout=stdout_pipe, + stderr=stderr_pipe, + ) except Exception as e: - if not os.environ.get('PROS_TOOLCHAIN'): - ui.logger(__name__).warn("PROS toolchain not found! Please ensure the toolchain is installed correctly and your environment variables are set properly.\n") - ui.logger(__name__).error(f"ERROR WHILE CALLING '{make_cmd}' WITH EXCEPTION: {str(e)}\n",extra={'sentry':False}) + if not os.environ.get("PROS_TOOLCHAIN"): + ui.logger(__name__).warn( + "PROS toolchain not found! 
Please ensure the toolchain is installed correctly and your environment variables are set properly.\n" + ) + ui.logger(__name__).error( + f"ERROR WHILE CALLING '{make_cmd}' WITH EXCEPTION: {str(e)}\n", extra={"sentry": False} + ) stdout_pipe.close() stderr_pipe.close() sys.exit() @@ -245,20 +292,26 @@ def make(self, build_args: List[str]): process.wait() return process.returncode - def make_scan_build(self, build_args: Tuple[str], cdb_file: Optional[Union[str, io.IOBase]] = None, - suppress_output: bool = False, sandbox: bool = False): - from libscanbuild.compilation import Compilation, CompilationDatabase - from libscanbuild.arguments import create_intercept_parser + def make_scan_build( + self, + build_args: Tuple[str], + cdb_file: Optional[Union[str, io.IOBase]] = None, + suppress_output: bool = False, + sandbox: bool = False, + ): + import argparse import itertools - import subprocess - import argparse + + from libscanbuild.arguments import create_intercept_parser + from libscanbuild.compilation import Compilation, CompilationDatabase if sandbox: import tempfile + td = tempfile.TemporaryDirectory() td_path = td.name.replace("\\", "/") - build_args = [*build_args, f'BINDIR={td_path}'] + build_args = [*build_args, f"BINDIR={td_path}"] def libscanbuild_capture(args: argparse.Namespace) -> Tuple[int, Iterable[Compilation]]: """ @@ -267,34 +320,45 @@ def libscanbuild_capture(args: argparse.Namespace) -> Tuple[int, Iterable[Compil :param args: the parsed and validated command line arguments :return: the exit status of build process. """ - from libscanbuild.intercept import setup_environment, run_build, exec_trace_files, parse_exec_trace, \ - compilations from libear import temporary_directory - - with temporary_directory(prefix='intercept-') as tmp_dir: + from libscanbuild.intercept import ( + compilations, + exec_trace_files, + parse_exec_trace, + run_build, + setup_environment, + ) + + with temporary_directory(prefix="intercept-") as tmp_dir: # run the build command environment = setup_environment(args, tmp_dir) - if os.environ.get('PROS_TOOLCHAIN'): - environment['PATH'] = os.path.join(os.environ.get('PROS_TOOLCHAIN'), 'bin') + os.pathsep + \ - environment['PATH'] + if os.environ.get("PROS_TOOLCHAIN"): + environment["PATH"] = ( + os.path.join(os.environ.get("PROS_TOOLCHAIN"), "bin") + os.pathsep + environment["PATH"] + ) - if sys.platform == 'darwin': - environment['PATH'] = os.path.dirname(os.path.abspath(sys.executable)) + os.pathsep + \ - environment['PATH'] + if sys.platform == "darwin": + environment["PATH"] = ( + os.path.dirname(os.path.abspath(sys.executable)) + os.pathsep + environment["PATH"] + ) if not suppress_output: pipe = EchoPipe() else: pipe = subprocess.DEVNULL logger(__name__).debug(self.directory) - exit_code=None + exit_code = None try: exit_code = run_build(args.build, env=environment, stdout=pipe, stderr=pipe, cwd=self.directory) except Exception as e: - if not os.environ.get('PROS_TOOLCHAIN'): - ui.logger(__name__).warn("PROS toolchain not found! Please ensure the toolchain is installed correctly and your environment variables are set properly.\n") - ui.logger(__name__).error(f"ERROR WHILE CALLING '{make_cmd}' WITH EXCEPTION: {str(e)}\n",extra={'sentry':False}) - if not suppress_output: + if not os.environ.get("PROS_TOOLCHAIN"): + ui.logger(__name__).warn( + "PROS toolchain not found! 
Please ensure the toolchain is installed correctly and your environment variables are set properly.\n" + ) + ui.logger(__name__).error( + f"ERROR WHILE CALLING '{make_cmd}' WITH EXCEPTION: {str(e)}\n", extra={"sentry": False} + ) + if not suppress_output: pipe.close() sys.exit() if not suppress_output: @@ -306,14 +370,23 @@ def libscanbuild_capture(args: argparse.Namespace) -> Tuple[int, Iterable[Compil return exit_code, iter(set(current)) # call make.exe if on Windows - if os.name == 'nt' and os.environ.get('PROS_TOOLCHAIN'): - make_cmd = os.path.join(os.environ.get('PROS_TOOLCHAIN'), 'bin', 'make.exe') + if os.name == "nt" and os.environ.get("PROS_TOOLCHAIN"): + make_cmd = os.path.join(os.environ.get("PROS_TOOLCHAIN"), "bin", "make.exe") else: - make_cmd = 'make' + make_cmd = "make" args = create_intercept_parser().parse_args( - ['--override-compiler', '--use-cc', 'arm-none-eabi-gcc', '--use-c++', 'arm-none-eabi-g++', make_cmd, - *build_args, - 'CC=intercept-cc', 'CXX=intercept-c++']) + [ + "--override-compiler", + "--use-cc", + "arm-none-eabi-gcc", + "--use-c++", + "arm-none-eabi-g++", + make_cmd, + *build_args, + "CC=intercept-cc", + "CXX=intercept-c++", + ] + ) exit_code, entries = libscanbuild_capture(args) if sandbox and td: @@ -323,64 +396,67 @@ def libscanbuild_capture(args: argparse.Namespace) -> Tuple[int, Iterable[Compil if not any(any_entries): return exit_code if not suppress_output: - ui.echo('Capturing metadata for PROS Editor...') + ui.echo("Capturing metadata for PROS Editor...") env = os.environ.copy() # Add PROS toolchain to the beginning of PATH to ensure PROS binaries are preferred - if os.environ.get('PROS_TOOLCHAIN'): - env['PATH'] = os.path.join(os.environ.get('PROS_TOOLCHAIN'), 'bin') + os.pathsep + env['PATH'] - cc_sysroot = subprocess.run([make_cmd, 'cc-sysroot'], env=env, stdout=subprocess.PIPE, - stderr=subprocess.PIPE, cwd=self.directory) + if os.environ.get("PROS_TOOLCHAIN"): + env["PATH"] = os.path.join(os.environ.get("PROS_TOOLCHAIN"), "bin") + os.pathsep + env["PATH"] + cc_sysroot = subprocess.run( + [make_cmd, "cc-sysroot"], env=env, stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=self.directory + ) lines = str(cc_sysroot.stderr.decode()).splitlines() + str(cc_sysroot.stdout.decode()).splitlines() lines = [l.strip() for l in lines] cc_sysroot_includes = [] copy = False for line in lines: - if line == '#include <...> search starts here:': + if line == "#include <...> search starts here:": copy = True continue - if line == 'End of search list.': + if line == "End of search list.": copy = False continue if copy: - cc_sysroot_includes.append(f'-isystem{line}') - cxx_sysroot = subprocess.run([make_cmd, 'cxx-sysroot'], env=env, stdout=subprocess.PIPE, - stderr=subprocess.PIPE, cwd=self.directory) + cc_sysroot_includes.append(f"-isystem{line}") + cxx_sysroot = subprocess.run( + [make_cmd, "cxx-sysroot"], env=env, stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=self.directory + ) lines = str(cxx_sysroot.stderr.decode()).splitlines() + str(cxx_sysroot.stdout.decode()).splitlines() lines = [l.strip() for l in lines] cxx_sysroot_includes = [] copy = False for line in lines: - if line == '#include <...> search starts here:': + if line == "#include <...> search starts here:": copy = True continue - if line == 'End of search list.': + if line == "End of search list.": copy = False continue if copy: - cxx_sysroot_includes.append(f'-isystem{line}') + cxx_sysroot_includes.append(f"-isystem{line}") new_entries, entries = itertools.tee(entries, 2) new_sources = 
set([e.source for e in entries]) if not cdb_file: - cdb_file = os.path.join(self.directory, 'compile_commands.json') + cdb_file = os.path.join(self.directory, "compile_commands.json") if isinstance(cdb_file, str) and os.path.isfile(cdb_file): - old_entries = itertools.filterfalse(lambda entry: entry.source in new_sources, - CompilationDatabase.load(cdb_file)) + old_entries = itertools.filterfalse( + lambda entry: entry.source in new_sources, CompilationDatabase.load(cdb_file) + ) else: old_entries = [] - extra_flags = ['-target', 'armv7ar-none-none-eabi'] - logger(__name__).debug('cc_sysroot_includes') + extra_flags = ["-target", "armv7ar-none-none-eabi"] + logger(__name__).debug("cc_sysroot_includes") logger(__name__).debug(cc_sysroot_includes) - logger(__name__).debug('cxx_sysroot_includes') + logger(__name__).debug("cxx_sysroot_includes") logger(__name__).debug(cxx_sysroot_includes) - if sys.platform == 'win32': + if sys.platform == "win32": extra_flags.extend(["-fno-ms-extensions", "-fno-ms-compatibility", "-fno-delayed-template-parsing"]) def new_entry_map(entry): - if entry.compiler == 'c': + if entry.compiler == "c": entry.flags = extra_flags + cc_sysroot_includes + entry.flags - elif entry.compiler == 'c++': + elif entry.compiler == "c++": entry.flags = extra_flags + cxx_sysroot_includes + entry.flags return entry @@ -388,14 +464,15 @@ def new_entry_map(entry): def entry_map(entry: Compilation): json_entry = entry.as_db_entry() - json_entry['arguments'][0] = 'clang' if entry.compiler == 'c' else 'clang++' + json_entry["arguments"][0] = "clang" if entry.compiler == "c" else "clang++" return json_entry entries = itertools.chain(old_entries, new_entries) json_entries = list(map(entry_map, entries)) if isinstance(cdb_file, str): - cdb_file = open(cdb_file, 'w') + cdb_file = open(cdb_file, "w") import json + json.dump(json_entries, cdb_file, sort_keys=True, indent=4) return exit_code @@ -403,21 +480,25 @@ def entry_map(entry: Compilation): def compile(self, build_args: List[str], scan_build: Optional[bool] = None): if scan_build is None: from pros.config.cli_config import cli_config + scan_build = cli_config().use_build_compile_commands return self.make_scan_build(build_args) if scan_build else self.make(build_args) @staticmethod def find_project(path: str, recurse_times: int = 10): - path = os.path.abspath(path or '.') + path = os.path.abspath(path or ".") if os.path.isfile(path): path = os.path.dirname(path) if os.path.isdir(path): - for n in range(recurse_times): + for _ in range(recurse_times): if path is not None and os.path.isdir(path): - files = [f for f in os.listdir(path) - if os.path.isfile(os.path.join(path, f)) and f.lower() == 'project.pros'] + files = [ + f + for f in os.listdir(path) + if os.path.isfile(os.path.join(path, f)) and f.lower() == "project.pros" + ] if len(files) == 1: # found a project.pros file! 
- logger(__name__).info(f'Found Project Path: {os.path.join(path, files[0])}') + logger(__name__).info(f"Found Project Path: {os.path.join(path, files[0])}") return os.path.join(path, files[0]) path = os.path.dirname(path) else: @@ -425,4 +506,4 @@ def find_project(path: str, recurse_times: int = 10): return None -__all__ = ['Project', 'ProjectReport'] +__all__ = ["Project", "ProjectReport"] diff --git a/pros/conductor/templates/base_template.py b/pros/conductor/templates/base_template.py index 95a19064..2eb7d6ad 100644 --- a/pros/conductor/templates/base_template.py +++ b/pros/conductor/templates/base_template.py @@ -12,20 +12,20 @@ def __init__(self, **kwargs): self.supported_kernels: str = None self.target: str = None self.metadata: Dict[str, Any] = {} - if 'orig' in kwargs: - self.__dict__.update({k: v for k, v in kwargs.pop('orig').__dict__.items() if k in self.__dict__}) + if "orig" in kwargs: + self.__dict__.update({k: v for k, v in kwargs.pop("orig").__dict__.items() if k in self.__dict__}) self.__dict__.update({k: v for k, v in kwargs.items() if k in self.__dict__}) self.metadata.update({k: v for k, v in kwargs.items() if k not in self.__dict__}) - if 'depot' in self.metadata and 'origin' not in self.metadata: - self.metadata['origin'] = self.metadata.pop('depot') - if 'd' in self.metadata and 'depot' not in self.metadata: - self.metadata['depot'] = self.metadata.pop('d') - if 'l' in self.metadata and 'location' not in self.metadata: - self.metadata['location'] = self.metadata.pop('l') - if self.name == 'pros': - self.name = 'kernel' + if "depot" in self.metadata and "origin" not in self.metadata: + self.metadata["origin"] = self.metadata.pop("depot") + if "d" in self.metadata and "depot" not in self.metadata: + self.metadata["depot"] = self.metadata.pop("d") + if "l" in self.metadata and "location" not in self.metadata: + self.metadata["location"] = self.metadata.pop("l") + if self.name == "pros": + self.name = "kernel" - def satisfies(self, query: 'BaseTemplate', kernel_version: Union[str, Version] = None) -> bool: + def satisfies(self, query: "BaseTemplate", kernel_version: Union[str, Version] = None) -> bool: if query.name and self.name != query.name: return False if query.target and self.target != query.target: @@ -47,7 +47,7 @@ def satisfies(self, query: 'BaseTemplate', kernel_version: Union[str, Version] = def __str__(self): fields = [self.metadata.get("origin", None), self.target, self.__class__.__name__] additional = ", ".join(map(str, filter(bool, fields))) - return f'{self.identifier} ({additional})' + return f"{self.identifier} ({additional})" def __gt__(self, other): if isinstance(other, BaseTemplate): @@ -65,30 +65,30 @@ def __eq__(self, other): def __hash__(self): return self.identifier.__hash__() - def as_query(self, version='>0', metadata=False, **kwargs): + def as_query(self, version=">0", metadata=False, **kwargs): if isinstance(metadata, bool) and not metadata: metadata = dict() return BaseTemplate(orig=self, version=version, metadata=metadata, **kwargs) @property def identifier(self): - return f'{self.name}@{self.version}' + return f"{self.name}@{self.version}" @property def origin(self): - return self.metadata.get('origin', 'Unknown') + return self.metadata.get("origin", "Unknown") @classmethod - def create_query(cls, name: str = None, **kwargs) -> 'BaseTemplate': + def create_query(cls, name: str = None, **kwargs) -> "BaseTemplate": if not isinstance(name, str): return cls(**kwargs) - if name.count('@') > 1: - raise ValueError(f'Malformed identifier: 
{name}') - if '@' in name: - name, kwargs['version'] = name.split('@') - if kwargs.get('version', 'latest') == 'latest': - kwargs['version'] = '>=0' - if name == 'kernal': + if name.count("@") > 1: + raise ValueError(f"Malformed identifier: {name}") + if "@" in name: + name, kwargs["version"] = name.split("@") + if kwargs.get("version", "latest") == "latest": + kwargs["version"] = ">=0" + if name == "kernal": ui.echo("Assuming 'kernal' is the British spelling of kernel.") - name = 'kernel' + name = "kernel" return cls(name=name, **kwargs) diff --git a/pros/conductor/templates/external_template.py b/pros/conductor/templates/external_template.py index ce08662e..870ef28d 100644 --- a/pros/conductor/templates/external_template.py +++ b/pros/conductor/templates/external_template.py @@ -10,18 +10,18 @@ class ExternalTemplate(Config, Template): def __init__(self, file: str, **kwargs): if os.path.isdir(file): - file = os.path.join(file, 'template.pros') + file = os.path.join(file, "template.pros") elif zipfile.is_zipfile(file): self.tf = tempfile.NamedTemporaryFile(delete=False) with zipfile.ZipFile(file) as zf: - with zf.open('template.pros') as zt: + with zf.open("template.pros") as zt: self.tf.write(zt.read()) self.tf.seek(0, 0) file = self.tf.name - error_on_decode = kwargs.pop('error_on_decode', False) + error_on_decode = kwargs.pop("error_on_decode", False) Template.__init__(self, **kwargs) Config.__init__(self, file, error_on_decode=error_on_decode) def __del__(self): - if hasattr(self, 'tr'): + if hasattr(self, "tr"): del self.tf diff --git a/pros/conductor/templates/local_template.py b/pros/conductor/templates/local_template.py index 53d66e73..d3be5b34 100644 --- a/pros/conductor/templates/local_template.py +++ b/pros/conductor/templates/local_template.py @@ -4,7 +4,7 @@ def _fix_path(*paths: str) -> str: - return os.path.normpath(os.path.join(*paths).replace('\\', '/')) + return os.path.normpath(os.path.join(*paths).replace("\\", "/")) class LocalTemplate(Template): diff --git a/pros/conductor/transaction.py b/pros/conductor/transaction.py index 0fcb05d7..4b7ba650 100644 --- a/pros/conductor/transaction.py +++ b/pros/conductor/transaction.py @@ -19,7 +19,7 @@ def extend_add(self, paths: Iterable[str], src: str): self.add(path, src) def add(self, path: str, src: str): - path = os.path.normpath(path.replace('\\', '/')) + path = os.path.normpath(path.replace("\\", "/")) self._add_files.add(path) self.effective_state.add(path) self._add_srcs[path] = src @@ -31,7 +31,7 @@ def extend_rm(self, paths: Iterable[str]): self.rm(path) def rm(self, path: str): - path = os.path.normpath(path.replace('\\', '/')) + path = os.path.normpath(path.replace("\\", "/")) self._rm_files.add(path) if path in self.effective_state: self.effective_state.remove(path) @@ -39,18 +39,18 @@ def rm(self, path: str): self._add_files.remove(path) self._add_srcs.pop(path) - def commit(self, label: str = 'Committing transaction', remove_empty_directories: bool = True): + def commit(self, label: str = "Committing transaction", remove_empty_directories: bool = True): with ui.progressbar(length=len(self._rm_files) + len(self._add_files), label=label) as pb: - for file in sorted(self._rm_files, key=lambda p: p.count('/') + p.count('\\'), reverse=True): + for file in sorted(self._rm_files, key=lambda p: p.count("/") + p.count("\\"), reverse=True): file_path = os.path.join(self.location, file) if os.path.isfile(file_path): - logger(__name__).info(f'Removing {file}') + logger(__name__).info(f"Removing {file}") 
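# A minimal standalone sketch of the deepest-first ordering the commit() loop above relies on:
# paths with more separators sort first, so a file is always removed before the directories
# that contain it are considered for cleanup. The example paths here are hypothetical.
rm_files = ["include/okapi/api.hpp", "include/api.h", "firmware/okapilib.a", "common.mk"]
deepest_first = sorted(rm_files, key=lambda p: p.count("/") + p.count("\\"), reverse=True)
print(deepest_first)
# ['include/okapi/api.hpp', 'include/api.h', 'firmware/okapilib.a', 'common.mk']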
os.remove(os.path.join(self.location, file)) else: - logger(__name__).info(f'Not removing nonexistent {file}') + logger(__name__).info(f"Not removing nonexistent {file}") pardir = os.path.abspath(os.path.join(file_path, os.pardir)) while remove_empty_directories and len(os.listdir(pardir)) == 0: - logger(__name__).info(f'Removing {os.path.relpath(pardir, self.location)}') + logger(__name__).info(f"Removing {os.path.relpath(pardir, self.location)}") os.rmdir(pardir) pardir = os.path.abspath(os.path.join(pardir, os.pardir)) if pardir == self.location: @@ -63,13 +63,13 @@ def commit(self, label: str = 'Committing transaction', remove_empty_directories destination = os.path.join(self.location, file) if os.path.isfile(source): if not os.path.isdir(os.path.dirname(destination)): - logger(__name__).debug(f'Creating directories: f{destination}') + logger(__name__).debug(f"Creating directories: f{destination}") os.makedirs(os.path.dirname(destination), exist_ok=True) - logger(__name__).info(f'Adding {file}') + logger(__name__).info(f"Adding {file}") shutil.copy(os.path.join(self._add_srcs[file], file), os.path.join(self.location, file)) else: logger(__name__).info(f"Not copying {file} because {source} doesn't exist.") pb.update(1) def __str__(self): - return f'Transaction Object: ADD: {self._add_files}\tRM: {self._rm_files}\tLocation: {self.location}' + return f"Transaction Object: ADD: {self._add_files}\tRM: {self._rm_files}\tLocation: {self.location}" diff --git a/pros/config/cli_config.py b/pros/config/cli_config.py index 8c962047..45f18a33 100644 --- a/pros/config/cli_config.py +++ b/pros/config/cli_config.py @@ -6,6 +6,7 @@ import click import pros.common + # import pros.conductor.providers.github_releases as githubreleases from pros.config.config import Config @@ -16,7 +17,7 @@ class CliConfig(Config): def __init__(self, file=None): if not file: - file = os.path.join(click.get_app_dir('PROS'), 'cli.pros') + file = os.path.join(click.get_app_dir("PROS"), "cli.pros") self.update_frequency: timedelta = timedelta(hours=1) self.override_use_build_compile_commands: Optional[bool] = None self.offer_sentry: Optional[bool] = None @@ -30,18 +31,19 @@ def needs_online_fetch(self, last_fetch: datetime) -> bool: def use_build_compile_commands(self): if self.override_use_build_compile_commands is not None: return self.override_use_build_compile_commands - paths = [os.path.join('~', '.pros-atom'), os.path.join('~', '.pros-editor')] + paths = [os.path.join("~", ".pros-atom"), os.path.join("~", ".pros-editor")] return any([os.path.exists(os.path.expanduser(p)) for p in paths]) - def get_upgrade_manifest(self, force: bool = False) -> Optional['UpgradeManifestV1']: + def get_upgrade_manifest(self, force: bool = False) -> Optional["UpgradeManifestV1"]: from pros.upgrade.manifests.upgrade_manifest_v1 import UpgradeManifestV1 # noqa: F811 if not force and not self.needs_online_fetch(self.cached_upgrade[0]): return self.cached_upgrade[1] - pros.common.logger(__name__).info('Fetching upgrade manifest...') - import requests + pros.common.logger(__name__).info("Fetching upgrade manifest...") import jsonpickle - r = requests.get('https://purduesigbots.github.io/pros-mainline/cli-updates.json') + import requests + + r = requests.get("https://purduesigbots.github.io/pros-mainline/cli-updates.json") pros.common.logger(__name__).debug(r) if r.status_code == 200: try: @@ -53,7 +55,7 @@ def get_upgrade_manifest(self, force: bool = False) -> Optional['UpgradeManifest self.save() return self.cached_upgrade[1] else: - 
pros.common.logger(__name__).warning(f'Failed to fetch CLI updates because status code: {r.status_code}') + pros.common.logger(__name__).warning(f"Failed to fetch CLI updates because status code: {r.status_code}") pros.common.logger(__name__).debug(r) return None @@ -64,6 +66,6 @@ def cli_config() -> CliConfig: return CliConfig() ctx.ensure_object(dict) assert isinstance(ctx.obj, dict) - if not hasattr(ctx.obj, 'cli_config') or not isinstance(ctx.obj['cli_config'], CliConfig): - ctx.obj['cli_config'] = CliConfig() - return ctx.obj['cli_config'] + if not hasattr(ctx.obj, "cli_config") or not isinstance(ctx.obj["cli_config"], CliConfig): + ctx.obj["cli_config"] = CliConfig() + return ctx.obj["cli_config"] diff --git a/pros/config/config.py b/pros/config/config.py index 984b668a..b0bb111e 100644 --- a/pros/config/config.py +++ b/pros/config/config.py @@ -1,6 +1,7 @@ import json.decoder import jsonpickle + from pros.common.utils import * @@ -16,39 +17,40 @@ class Config(object): """ def __init__(self, file, error_on_decode=False): - logger(__name__).debug('Opening {} ({})'.format(file, self.__class__.__name__)) + logger(__name__).debug("Opening {} ({})".format(file, self.__class__.__name__)) self.save_file = file # __ignored property has any fields which shouldn't be included the pickled config file - self.__ignored = self.__dict__.get('_Config__ignored', []) - self.__ignored.append('save_file') - self.__ignored.append('_Config__ignored') + self.__ignored = self.__dict__.get("_Config__ignored", []) + self.__ignored.append("save_file") + self.__ignored.append("_Config__ignored") if file: # If the file already exists, update this new config with the values in the file if os.path.isfile(file): - with open(file, 'r') as f: + with open(file, "r") as f: try: result = jsonpickle.decode(f.read()) if isinstance(result, dict): - if 'py/state' in result: - class_name = '{}.{}'.format(self.__class__.__module__, self.__class__.__qualname__) - logger(__name__).debug( - 'Coercing {} to {}'.format(result['py/object'], class_name)) - old_object = result['py/object'] + if "py/state" in result: + class_name = "{}.{}".format(self.__class__.__module__, self.__class__.__qualname__) + logger(__name__).debug("Coercing {} to {}".format(result["py/object"], class_name)) + old_object = result["py/object"] try: - result['py/object'] = class_name + result["py/object"] = class_name result = jsonpickle.unpickler.Unpickler().restore(result) except (json.decoder.JSONDecodeError, AttributeError) as e: logger(__name__).debug(e) - logger(__name__).warning(f'Couldn\'t coerce {file} ({old_object}) to ' - f'{class_name}. Using rudimentary coercion') - self.__dict__.update(result['py/state']) + logger(__name__).warning( + f"Couldn't coerce {file} ({old_object}) to " + f"{class_name}. 
Using rudimentary coercion" + ) + self.__dict__.update(result["py/state"]) else: self.__dict__.update(result) elif isinstance(result, object): self.__dict__.update(result.__dict__) except (json.decoder.JSONDecodeError, AttributeError, UnicodeDecodeError) as e: if error_on_decode: - logger(__name__).error(f'Error parsing {file}') + logger(__name__).error(f"Error parsing {file}") logger(__name__).exception(e) raise e else: @@ -56,7 +58,7 @@ def __init__(self, file, error_on_decode=False): pass # obvious elif os.path.isdir(file): - raise ValueError('{} must be a file, not a directory'.format(file)) + raise ValueError("{} must be a file, not a directory".format(file)) # The file didn't exist when we created, so we'll save the default values else: try: @@ -66,14 +68,15 @@ def __init__(self, file, error_on_decode=False): logger(__name__).exception(e) raise e else: - logger(__name__).debug('Failed to save {} ({})'.format(file, e)) + logger(__name__).debug("Failed to save {} ({})".format(file, e)) from pros.common.sentry import add_context + add_context(self) def __getstate__(self): state = self.__dict__.copy() - if '_Config__ignored' in self.__dict__: + if "_Config__ignored" in self.__dict__: for key in [k for k in self.__ignored if k in state]: del state[key] return state @@ -82,7 +85,7 @@ def __setstate__(self, state): self.__dict__.update(state) def __str__(self): - jsonpickle.set_encoder_options('json', sort_keys=True) + jsonpickle.set_encoder_options("json", sort_keys=True) return jsonpickle.encode(self) def delete(self): @@ -92,15 +95,15 @@ def delete(self): def save(self, file: str = None) -> None: if file is None: file = self.save_file - jsonpickle.set_encoder_options('json', sort_keys=True, indent=4) + jsonpickle.set_encoder_options("json", sort_keys=True, indent=4) if os.path.dirname(file): os.makedirs(os.path.dirname(file), exist_ok=True) - with open(file, 'w') as f: + with open(file, "w") as f: f.write(jsonpickle.encode(self)) - logger(__name__).debug('Saved {}'.format(file)) + logger(__name__).debug("Saved {}".format(file)) def migrate(self, migration): - for (old, new) in migration.iteritems(): + for old, new in migration.iteritems(): if self.__dict__.get(old) is not None: self.__dict__[new] = self.__dict__[old] del self.__dict__[old] diff --git a/pros/ga/analytics.py b/pros/ga/analytics.py index 6f786105..40202be1 100644 --- a/pros/ga/analytics.py +++ b/pros/ga/analytics.py @@ -1,84 +1,83 @@ import json -from os import path +import random import uuid +from concurrent.futures import as_completed +from os import path + import requests from requests_futures.sessions import FuturesSession -import random -from concurrent.futures import as_completed -url = 'https://www.google-analytics.com/collect' -agent = 'pros-cli' +url = "https://www.google-analytics.com/collect" +agent = "pros-cli" """ PROS ANALYTICS CLASS """ -class Analytics(): + +class Analytics: def __init__(self): from pros.config.cli_config import cli_config as get_cli_config + self.cli_config = get_cli_config() - #If GA hasn't been setup yet (first time install/update) + # If GA hasn't been setup yet (first time install/update) if not self.cli_config.ga: - #Default values for GA - self.cli_config.ga = { - "enabled": "True", - "ga_id": "UA-84548828-8", - "u_id": str(uuid.uuid4()) - } + # Default values for GA + self.cli_config.ga = {"enabled": "True", "ga_id": "UA-84548828-8", "u_id": str(uuid.uuid4())} self.cli_config.save() self.sent = False - #Variables that the class will use - self.gaID = self.cli_config.ga['ga_id'] - 
self.useAnalytics = self.cli_config.ga['enabled'] - self.uID = self.cli_config.ga['u_id'] + # Variables that the class will use + self.gaID = self.cli_config.ga["ga_id"] + self.useAnalytics = self.cli_config.ga["enabled"] + self.uID = self.cli_config.ga["u_id"] self.pendingRequests = [] - def send(self,action): + def send(self, action): if not self.useAnalytics or self.sent: return - self.sent=True # Prevent Send from being called multiple times + self.sent = True # Prevent Send from being called multiple times try: - #Payload to be sent to GA, idk what some of them are but it works + # Payload to be sent to GA, idk what some of them are but it works payload = { - 'v': 1, - 'tid': self.gaID, - 'aip': 1, - 'z': random.random(), - 'cid': self.uID, - 't': 'event', - 'ec': 'action', - 'ea': action, - 'el': 'CLI', - 'ev': '1', - 'ni': 0 + "v": 1, + "tid": self.gaID, + "aip": 1, + "z": random.random(), + "cid": self.uID, + "t": "event", + "ec": "action", + "ea": action, + "el": "CLI", + "ev": "1", + "ni": 0, } - session = FuturesSession() + session = FuturesSession() - #Send payload to GA servers - future = session.post(url=url, - data=payload, - headers={'User-Agent': agent}, - timeout=5.0) + # Send payload to GA servers + future = session.post(url=url, data=payload, headers={"User-Agent": agent}, timeout=5.0) self.pendingRequests.append(future) - except Exception as e: + except Exception: from pros.cli.common import logger - logger(__name__).warning("Unable to send analytics. Do you have a stable internet connection?", extra={'sentry': False}) + + logger(__name__).warning( + "Unable to send analytics. Do you have a stable internet connection?", extra={"sentry": False} + ) def set_use(self, value: bool): - #Sets if GA is being used or not + # Sets if GA is being used or not self.useAnalytics = value - self.cli_config.ga['enabled'] = self.useAnalytics + self.cli_config.ga["enabled"] = self.useAnalytics self.cli_config.save() - + def process_requests(self): responses = [] for future in as_completed(self.pendingRequests): try: response = future.result() - - if not response.status_code==200: + + if not response.status_code == 200: print("Something went wrong while sending analytics!") print(response) @@ -87,9 +86,8 @@ def process_requests(self): except Exception: print("Something went wrong while sending analytics!") - self.pendingRequests.clear() return responses -analytics = Analytics() \ No newline at end of file +analytics = Analytics() diff --git a/pros/serial/__init__.py b/pros/serial/__init__.py index 0177d021..36e16fa2 100644 --- a/pros/serial/__init__.py +++ b/pros/serial/__init__.py @@ -4,11 +4,11 @@ def bytes_to_str(arr): if isinstance(arr, str): arr = bytes(arr) - if hasattr(arr, '__iter__'): - return ''.join('{:02X} '.format(x) for x in arr).strip() + if hasattr(arr, "__iter__"): + return "".join("{:02X} ".format(x) for x in arr).strip() else: # actually just a single byte - return '0x{:02X}'.format(arr) + return "0x{:02X}".format(arr) -def decode_bytes_to_str(data: Union[bytes, bytearray], encoding: str = 'utf-8', errors: str = 'strict') -> str: - return data.split(b'\0', 1)[0].decode(encoding=encoding, errors=errors) +def decode_bytes_to_str(data: Union[bytes, bytearray], encoding: str = "utf-8", errors: str = "strict") -> str: + return data.split(b"\0", 1)[0].decode(encoding=encoding, errors=errors) diff --git a/pros/serial/devices/__init__.py b/pros/serial/devices/__init__.py index ac6cd8c0..f5d0e9b1 100644 --- a/pros/serial/devices/__init__.py +++ b/pros/serial/devices/__init__.py 
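# A standalone sketch of what the two helpers reformatted a few lines above in
# pros/serial/__init__.py produce, assuming only the behavior visible in that hunk
# (a spaced hex dump, and a decode that stops at the first NUL byte); the sample bytes
# and file name are made up for illustration.
payload = bytes([0xAA, 0x55, 0x21])
hex_dump = "".join("{:02X} ".format(x) for x in payload).strip()      # "AA 55 21"
remote_name = b"slot_1.bin\0\0\0".split(b"\0", 1)[0].decode("utf-8")  # "slot_1.bin"
print(hex_dump, remote_name)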
@@ -1,2 +1,2 @@ from .generic_device import GenericDevice -from .stream_device import StreamDevice, RawStreamDevice +from .stream_device import RawStreamDevice, StreamDevice diff --git a/pros/serial/devices/stream_device.py b/pros/serial/devices/stream_device.py index 2649af97..a285619c 100644 --- a/pros/serial/devices/stream_device.py +++ b/pros/serial/devices/stream_device.py @@ -42,7 +42,7 @@ def promiscuous(self, value: bool): pass def read(self) -> Tuple[bytes, bytes]: - return b'', self.port.read_all() + return b"", self.port.read_all() def write(self, data: Union[bytes, str]): self.port.write(data) diff --git a/pros/serial/devices/vex/cortex_device.py b/pros/serial/devices/vex/cortex_device.py index 02dbfe0f..189b3e86 100644 --- a/pros/serial/devices/vex/cortex_device.py +++ b/pros/serial/devices/vex/cortex_device.py @@ -6,15 +6,15 @@ from typing import * from pros.common import ui -from pros.common.utils import retries, logger +from pros.common.utils import logger, retries from pros.conductor import Project from pros.serial import bytes_to_str from pros.serial.devices.vex import VEXCommError from pros.serial.devices.vex.stm32_device import STM32Device from pros.serial.ports import list_all_comports -from .vex_device import VEXDevice from ..system_device import SystemDevice +from .vex_device import VEXDevice def find_cortex_ports(): @@ -26,26 +26,28 @@ class SystemStatus(object): def __init__(self, data: Tuple[bytes, ...]): self.joystick_firmware = data[0:2] self.robot_firmware = data[2:4] - self.joystick_battery = float(data[4]) * .059 - self.robot_battery = float(data[5]) * .059 - self.backup_battery = float(data[6]) * .059 + self.joystick_battery = float(data[4]) * 0.059 + self.robot_battery = float(data[5]) * 0.059 + self.backup_battery = float(data[6]) * 0.059 self.flags = CortexDevice.SystemStatusFlags(data[7]) def __str__(self): - return f' Tether: {str(self.flags)}\n' \ - f' Cortex: F/W {self.robot_firmware[0]}.{self.robot_firmware[1]} w/ {self.robot_battery:1.2f} V ' \ - f'(Backup: {self.backup_battery:1.2f} V)\n' \ - f'Joystick: F/W {self.joystick_firmware[0]}.{self.robot_firmware[1]} w/ ' \ - f'{self.joystick_battery:1.2f} V' + return ( + f" Tether: {str(self.flags)}\n" + f" Cortex: F/W {self.robot_firmware[0]}.{self.robot_firmware[1]} w/ {self.robot_battery:1.2f} V " + f"(Backup: {self.backup_battery:1.2f} V)\n" + f"Joystick: F/W {self.joystick_firmware[0]}.{self.robot_firmware[1]} w/ " + f"{self.joystick_battery:1.2f} V" + ) class SystemStatusFlags(IntFlag): - DL_MODE = (1 << 0) - TETH_VN2 = (1 << 2) - FCS_CONNECT = (1 << 3) - TETH_USB = (1 << 4) - DIRECT_USB = (1 << 5) - FCS_AUTON = (1 << 6) - FCS_DISABLE = (1 << 7) + DL_MODE = 1 << 0 + TETH_VN2 = 1 << 2 + FCS_CONNECT = 1 << 3 + TETH_USB = 1 << 4 + DIRECT_USB = 1 << 5 + FCS_AUTON = 1 << 6 + FCS_DISABLE = 1 << 7 TETH_BITS = DL_MODE | TETH_VN2 | TETH_USB @@ -54,26 +56,26 @@ def andeq(a, b): return (a & b) == b if not self.value & self.TETH_BITS: - s = 'Serial w/VEXnet 1.0 Keys' + s = "Serial w/VEXnet 1.0 Keys" elif andeq(self.value, 0x01): - s = 'Serial w/VEXnet 1.0 Keys (turbo)' + s = "Serial w/VEXnet 1.0 Keys (turbo)" elif andeq(self.value, 0x04): - s = 'Serial w/VEXnet 2.0 Keys' + s = "Serial w/VEXnet 2.0 Keys" elif andeq(self.value, 0x05): - s = 'Serial w/VEXnet 2.0 Keys (download mode)' + s = "Serial w/VEXnet 2.0 Keys (download mode)" elif andeq(self.value, 0x10): - s = 'Serial w/ a USB Cable' + s = "Serial w/ a USB Cable" elif andeq(self.value, 0x20): - s = 'Directly w/ a USB Cable' + s = "Directly w/ a USB Cable" 
else: - s = 'Unknown' + s = "Unknown" if andeq(self.value, self.FCS_CONNECT): - s += ' - FCS Connected' + s += " - FCS Connected" return s def get_connected_device(self) -> SystemDevice: - logger(__name__).info('Interrogating Cortex...') + logger(__name__).info("Interrogating Cortex...") stm32 = STM32Device(self.port, do_negoitate=False) try: stm32.get(n_retries=1) @@ -82,21 +84,21 @@ def get_connected_device(self) -> SystemDevice: return self def upload_project(self, project: Project, **kwargs): - assert project.target == 'cortex' + assert project.target == "cortex" output_path = project.path.joinpath(project.output) if not output_path.exists(): - raise ui.dont_send(Exception('No output files were found! Have you built your project?')) - with output_path.open(mode='rb') as pf: + raise ui.dont_send(Exception("No output files were found! Have you built your project?")) + with output_path.open(mode="rb") as pf: return self.write_program(pf, **kwargs) def write_program(self, file: typing.BinaryIO, **kwargs): - action_string = '' - if hasattr(file, 'name'): - action_string += f' {Path(file.name).name}' - action_string += f' to Cortex on {self.port}' - ui.echo(f'Uploading {action_string}') + action_string = "" + if hasattr(file, "name"): + action_string += f" {Path(file.name).name}" + action_string += f" to Cortex on {self.port}" + ui.echo(f"Uploading {action_string}") - logger(__name__).info('Writing program to Cortex') + logger(__name__).info("Writing program to Cortex") status = self.query_system() logger(__name__).info(status) if not status.flags | self.SystemStatusFlags.TETH_USB and not status.flags | self.SystemStatusFlags.DL_MODE: @@ -105,25 +107,25 @@ def write_program(self, file: typing.BinaryIO, **kwargs): bootloader = self.expose_bootloader() rv = bootloader.write_program(file, **kwargs) - ui.finalize('upload', f'Finished uploading {action_string}') + ui.finalize("upload", f"Finished uploading {action_string}") return rv @retries def query_system(self) -> SystemStatus: - logger(__name__).info('Querying system information') + logger(__name__).info("Querying system information") rx = self._txrx_simple_struct(0x21, "<8B2x") status = CortexDevice.SystemStatus(rx) - ui.finalize('cortex-status', status) + ui.finalize("cortex-status", status) return status @retries def send_to_download_channel(self): - logger(__name__).info('Sending to download channel') + logger(__name__).info("Sending to download channel") self._txrx_ack_packet(0x35, timeout=1.0) @retries def expose_bootloader(self): - logger(__name__).info('Exposing bootloader') + logger(__name__).info("Exposing bootloader") for _ in itertools.repeat(None, 5): self._tx_packet(0x25) time.sleep(0.1) @@ -141,14 +143,14 @@ def _rx_ack(self, timeout: float = 0.01): def _txrx_ack_packet(self, command: int, timeout=0.1): """ - Goes through a send/receive cycle with a VEX device. - Transmits the command with the optional additional payload, then reads and parses the outer layer - of the response - :param command: Command to send the device - :param retries: Number of retries to attempt to parse the output before giving up and raising an error - :return: Returns a dictionary containing the received command field and the payload. Correctly computes - the payload length even if the extended command (0x56) is used (only applies to the V5). - """ + Goes through a send/receive cycle with a VEX device. 
+ Transmits the command with the optional additional payload, then reads and parses the outer layer + of the response + :param command: Command to send the device + :param retries: Number of retries to attempt to parse the output before giving up and raising an error + :return: Returns a dictionary containing the received command field and the payload. Correctly computes + the payload length even if the extended command (0x56) is used (only applies to the V5). + """ tx = self._tx_packet(command) self._rx_ack(timeout=timeout) - logger(__name__).debug('TX: {}'.format(bytes_to_str(tx))) + logger(__name__).debug("TX: {}".format(bytes_to_str(tx))) diff --git a/pros/serial/devices/vex/crc.py b/pros/serial/devices/vex/crc.py index f53bee5d..764a42c0 100644 --- a/pros/serial/devices/vex/crc.py +++ b/pros/serial/devices/vex/crc.py @@ -9,15 +9,15 @@ def __init__(self, size: int, polynomial: int): for i in range(256): crc_accumulator = i << (self._size - 8) - for j in range(8): + for _ in range(8): if crc_accumulator & (1 << (self._size - 1)): crc_accumulator = (crc_accumulator << 1) ^ self._polynomial else: - crc_accumulator = (crc_accumulator << 1) + crc_accumulator = crc_accumulator << 1 self._table.append(crc_accumulator) def compute(self, data: Iterable[int], accumulator: int = 0): for d in data: - i = ((accumulator >> (self._size - 8)) ^ d) & 0xff + i = ((accumulator >> (self._size - 8)) ^ d) & 0xFF accumulator = ((accumulator << 8) ^ self._table[i]) & ((1 << self._size) - 1) return accumulator diff --git a/pros/serial/devices/vex/message.py b/pros/serial/devices/vex/message.py index 8a45b0c4..24139ff7 100644 --- a/pros/serial/devices/vex/message.py +++ b/pros/serial/devices/vex/message.py @@ -4,16 +4,14 @@ class Message(bytes): - def __new__(cls, rx: bytes, tx: bytes, internal_rx: Union[bytes, int] = None, - bookmarks: Dict[str, bytes] = None): + def __new__(cls, rx: bytes, tx: bytes, internal_rx: Union[bytes, int] = None, bookmarks: Dict[str, bytes] = None): if internal_rx is None: internal_rx = rx if isinstance(internal_rx, int): internal_rx = bytes([internal_rx]) return super().__new__(cls, internal_rx) - def __init__(self, rx: bytes, tx: bytes, internal_rx: Union[bytes, int] = None, - bookmarks: Dict[str, bytes] = None): + def __init__(self, rx: bytes, tx: bytes, internal_rx: Union[bytes, int] = None, bookmarks: Dict[str, bytes] = None): if internal_rx is None: internal_rx = rx if isinstance(internal_rx, int): @@ -35,4 +33,4 @@ def __setitem__(self, key, value): self.bookmarks[key] = value def __str__(self): - return 'TX:{}\tRX:{}'.format(bytes_to_str(self.tx), bytes_to_str(self.rx)) + return "TX:{}\tRX:{}".format(bytes_to_str(self.tx), bytes_to_str(self.rx)) diff --git a/pros/serial/devices/vex/stm32_device.py b/pros/serial/devices/vex/stm32_device.py index eecfdc47..0907ef0f 100644 --- a/pros/serial/devices/vex/stm32_device.py +++ b/pros/serial/devices/vex/stm32_device.py @@ -19,7 +19,7 @@ class STM32Device(GenericDevice, SystemDevice): ACK_BYTE = 0x79 NACK_BYTE = 0xFF - NUM_PAGES = 0xff + NUM_PAGES = 0xFF PAGE_SIZE = 0x2000 def __init__(self, port: BasePort, must_initialize: bool = False, do_negoitate: bool = True): @@ -29,16 +29,16 @@ def __init__(self, port: BasePort, must_initialize: bool = False, do_negoitate: if do_negoitate: # self.port.write(b'\0' * 255) if must_initialize: - self._txrx_command(0x7f, checksum=False) + self._txrx_command(0x7F, checksum=False) try: self.get(n_retries=0) except: - logger(__name__).info('Sending bootloader initialization') + 
logger(__name__).info("Sending bootloader initialization") time.sleep(0.01) self.port.rts = 0 for _ in itertools.repeat(None, times=3): time.sleep(0.01) - self._txrx_command(0x7f, checksum=False) + self._txrx_command(0x7F, checksum=False) time.sleep(0.01) self.get() @@ -47,12 +47,13 @@ def write_program(self, file: typing.BinaryIO, preserve_fs: bool = False, go_aft file.seek(0, 0) if file_len > (self.NUM_PAGES * self.PAGE_SIZE): raise VEXCommError( - f'File is too big to be uploaded (max file size: {self.NUM_PAGES * self.PAGE_SIZE} bytes)') + f"File is too big to be uploaded (max file size: {self.NUM_PAGES * self.PAGE_SIZE} bytes)" + ) - if hasattr(file, 'name'): + if hasattr(file, "name"): display_name = file.name else: - display_name = '(memory)' + display_name = "(memory)" if not preserve_fs: self.erase_all() @@ -60,7 +61,7 @@ def write_program(self, file: typing.BinaryIO, preserve_fs: bool = False, go_aft self.erase_memory(list(range(0, int(file_len / self.PAGE_SIZE) + 1))) address = 0x08000000 - with ui.progressbar(length=file_len, label=f'Uploading {display_name}') as progress: + with ui.progressbar(length=file_len, label=f"Uploading {display_name}") as progress: for i in range(0, file_len, 256): write_size = 256 if i + 256 > file_len: @@ -77,75 +78,77 @@ def scan_prosfs(self): @retries def get(self): - logger(__name__).info('STM32: Get') + logger(__name__).info("STM32: Get") self._txrx_command(0x00) n_bytes = self.port.read(1)[0] assert n_bytes == 11 data = self.port.read(n_bytes + 1) - logger(__name__).info(f'STM32 Bootloader version 0x{data[0]:x}') + logger(__name__).info(f"STM32 Bootloader version 0x{data[0]:x}") self.commands = data[1:] - logger(__name__).debug(f'STM32 Bootloader commands are: {bytes_to_str(data[1:])}') + logger(__name__).debug(f"STM32 Bootloader commands are: {bytes_to_str(data[1:])}") assert self.port.read(1)[0] == self.ACK_BYTE @retries def get_read_protection_status(self): - logger(__name__).info('STM32: Get ID & Read Protection Status') + logger(__name__).info("STM32: Get ID & Read Protection Status") self._txrx_command(0x01) data = self.port.read(3) - logger(__name__).debug(f'STM32 Bootloader Get Version & Read Protection Status is: {bytes_to_str(data)}') + logger(__name__).debug(f"STM32 Bootloader Get Version & Read Protection Status is: {bytes_to_str(data)}") assert self.port.read(1)[0] == self.ACK_BYTE @retries def get_id(self): - logger(__name__).info('STM32: Get PID') + logger(__name__).info("STM32: Get PID") self._txrx_command(0x02) n_bytes = self.port.read(1)[0] pid = self.port.read(n_bytes + 1) - logger(__name__).debug(f'STM32 Bootloader PID is {pid}') + logger(__name__).debug(f"STM32 Bootloader PID is {pid}") @retries def read_memory(self, address: int, n_bytes: int): - logger(__name__).info(f'STM32: Read {n_bytes} fromo 0x{address:x}') + logger(__name__).info(f"STM32: Read {n_bytes} fromo 0x{address:x}") assert 255 >= n_bytes > 0 self._txrx_command(0x11) - self._txrx_command(struct.pack('>I', address)) + self._txrx_command(struct.pack(">I", address)) self._txrx_command(n_bytes) return self.port.read(n_bytes) @retries def go(self, start_address: int): - logger(__name__).info(f'STM32: Go 0x{start_address:x}') + logger(__name__).info(f"STM32: Go 0x{start_address:x}") self._txrx_command(0x21) try: - self._txrx_command(struct.pack('>I', start_address), timeout=5.) + self._txrx_command(struct.pack(">I", start_address), timeout=5.0) except VEXCommError: - logger(__name__).warning('STM32 Bootloader did not acknowledge GO command. 
' - 'The program may take a moment to begin running ' - 'or the device should be rebooted.') + logger(__name__).warning( + "STM32 Bootloader did not acknowledge GO command. " + "The program may take a moment to begin running " + "or the device should be rebooted." + ) @retries def write_memory(self, start_address: int, data: bytes): - logger(__name__).info(f'STM32: Write {len(data)} to 0x{start_address:x}') + logger(__name__).info(f"STM32: Write {len(data)} to 0x{start_address:x}") assert 0 < len(data) <= 256 if len(data) % 4 != 0: - data = data + (b'\0' * (4 - (len(data) % 4))) + data = data + (b"\0" * (4 - (len(data) % 4))) self._txrx_command(0x31) - self._txrx_command(struct.pack('>I', start_address)) + self._txrx_command(struct.pack(">I", start_address)) self._txrx_command(bytes([len(data) - 1, *data])) @retries def erase_all(self): - logger(__name__).info('STM32: Erase all pages') + logger(__name__).info("STM32: Erase all pages") if not self.commands[6] == 0x43: - raise VEXCommError('Standard erase not supported on this device (only extended erase)') + raise VEXCommError("Standard erase not supported on this device (only extended erase)") self._txrx_command(0x43) - self._txrx_command(0xff) + self._txrx_command(0xFF) @retries def erase_memory(self, page_numbers: List[int]): - logger(__name__).info(f'STM32: Erase pages: {page_numbers}') + logger(__name__).info(f"STM32: Erase pages: {page_numbers}") if not self.commands[6] == 0x43: - raise VEXCommError('Standard erase not supported on this device (only extended erase)') + raise VEXCommError("Standard erase not supported on this device (only extended erase)") assert 0 < len(page_numbers) <= 255 assert all([0 <= p <= 255 for p in page_numbers]) self._txrx_command(0x43) @@ -153,39 +156,39 @@ def erase_memory(self, page_numbers: List[int]): @retries def extended_erase(self, page_numbers: List[int]): - logger(__name__).info(f'STM32: Extended Erase pages: {page_numbers}') + logger(__name__).info(f"STM32: Extended Erase pages: {page_numbers}") if not self.commands[6] == 0x44: - raise IOError('Extended erase not supported on this device (only standard erase)') - assert 0 < len(page_numbers) < 0xfff0 - assert all([0 <= p <= 0xffff for p in page_numbers]) + raise IOError("Extended erase not supported on this device (only standard erase)") + assert 0 < len(page_numbers) < 0xFFF0 + assert all([0 <= p <= 0xFFFF for p in page_numbers]) self._txrx_command(0x44) - self._txrx_command(bytes([len(page_numbers) - 1, *struct.pack(f'>{len(page_numbers)}H', *page_numbers)])) + self._txrx_command(bytes([len(page_numbers) - 1, *struct.pack(f">{len(page_numbers)}H", *page_numbers)])) @retries def extended_erase_special(self, command: int): - logger(__name__).info(f'STM32: Extended special erase: {command:x}') + logger(__name__).info(f"STM32: Extended special erase: {command:x}") if not self.commands[6] == 0x44: - raise IOError('Extended erase not supported on this device (only standard erase)') - assert 0xfffd <= command <= 0xffff + raise IOError("Extended erase not supported on this device (only standard erase)") + assert 0xFFFD <= command <= 0xFFFF self._txrx_command(0x44) - self._txrx_command(struct.pack('>H', command)) + self._txrx_command(struct.pack(">H", command)) def _txrx_command(self, command: Union[int, bytes], timeout: float = 0.01, checksum: bool = True): self.port.read_all() if isinstance(command, bytes): message = command + (bytes([reduce(operator.xor, command, 0x00)]) if checksum else bytes([])) elif isinstance(command, int): - message = 
bytearray([command, ~command & 0xff] if checksum else [command]) + message = bytearray([command, ~command & 0xFF] if checksum else [command]) else: - raise ValueError(f'Expected command to be bytes or int but got {type(command)}') - logger(__name__).debug(f'STM32 TX: {bytes_to_str(message)}') + raise ValueError(f"Expected command to be bytes or int but got {type(command)}") + logger(__name__).debug(f"STM32 TX: {bytes_to_str(message)}") self.port.write(message) self.port.flush() start_time = time.time() while time.time() - start_time < timeout: data = self.port.read(1) if data and len(data) == 1: - logger(__name__).debug(f'STM32 RX: {data[0]} =?= {self.ACK_BYTE}') + logger(__name__).debug(f"STM32 RX: {data[0]} =?= {self.ACK_BYTE}") if data[0] == self.ACK_BYTE: return raise VEXCommError(f"Device never ACK'd to {command}", command) diff --git a/pros/serial/devices/vex/v5_device.py b/pros/serial/devices/vex/v5_device.py index 2720c0c1..1054f24e 100644 --- a/pros/serial/devices/vex/v5_device.py +++ b/pros/serial/devices/vex/v5_device.py @@ -1,10 +1,10 @@ import gzip import io +import platform import re import struct import time import typing -import platform from collections import defaultdict from configparser import ConfigParser from datetime import datetime, timedelta @@ -16,46 +16,53 @@ from semantic_version import Spec -from pros.common import ui from pros.common import * +from pros.common import ui from pros.common.utils import * from pros.conductor import Project from pros.serial import bytes_to_str, decode_bytes_to_str from pros.serial.ports import BasePort, list_all_comports + +from ..system_device import SystemDevice from .comm_error import VEXCommError from .crc import CRC from .message import Message from .vex_device import VEXDevice -from ..system_device import SystemDevice int_str = Union[int, str] def find_v5_ports(p_type: str): def filter_vex_ports(p): - return p.vid is not None and p.vid in [0x2888, 0x0501] or \ - p.name is not None and ('VEX' in p.name or 'V5' in p.name) + return ( + p.vid is not None + and p.vid in [0x2888, 0x0501] + or p.name is not None + and ("VEX" in p.name or "V5" in p.name) + ) def filter_v5_ports(p, locations, names): - return (p.location is not None and any([p.location.endswith(l) for l in locations])) or \ - (p.name is not None and any([n in p.name for n in names])) or \ - (p.description is not None and any([n in p.description for n in names])) + return ( + (p.location is not None and any([p.location.endswith(l) for l in locations])) + or (p.name is not None and any([n in p.name for n in names])) + or (p.description is not None and any([n in p.description for n in names])) + ) def filter_v5_ports_mac(p, device): - return (p.device is not None and p.device.endswith(device)) + return p.device is not None and p.device.endswith(device) ports = [p for p in list_all_comports() if filter_vex_ports(p)] # Initially try filtering based off of location or the name of the device. 
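# A self-contained sketch of the location/name heuristic described in the comment above,
# using hypothetical SimpleNamespace objects in place of real pyserial ListPortInfo entries;
# the location strings and descriptions are invented for illustration.
from types import SimpleNamespace

candidates = [
    SimpleNamespace(location="1-1.4:1.0", name=None, description="VEX V5 System Port"),
    SimpleNamespace(location="1-1.4:1.2", name=None, description="VEX V5 User Port"),
]

def matches(p, locations, names):
    return (
        (p.location is not None and any(p.location.endswith(l) for l in locations))
        or (p.name is not None and any(n in p.name for n in names))
        or (p.description is not None and any(n in p.description for n in names))
    )

user_ports = [p for p in candidates if matches(p, ["2"], ["User"])]
system_ports = [p for p in candidates if matches(p, ["0"], ["System", "Communications"])]
print(len(user_ports), len(system_ports))  # 1 1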
# Special logic for macOS - if platform.system() == 'Darwin': - user_ports = [p for p in ports if filter_v5_ports_mac(p, '3')] - system_ports = [p for p in ports if filter_v5_ports_mac(p, '1')] - joystick_ports = [p for p in ports if filter_v5_ports_mac(p, '2')] + if platform.system() == "Darwin": + user_ports = [p for p in ports if filter_v5_ports_mac(p, "3")] + system_ports = [p for p in ports if filter_v5_ports_mac(p, "1")] + joystick_ports = [p for p in ports if filter_v5_ports_mac(p, "2")] else: - user_ports = [p for p in ports if filter_v5_ports(p, ['2'], ['User'])] - system_ports = [p for p in ports if filter_v5_ports(p, ['0'], ['System', 'Communications'])] - joystick_ports = [p for p in ports if filter_v5_ports(p, ['1'], ['Controller'])] + user_ports = [p for p in ports if filter_v5_ports(p, ["2"], ["User"])] + system_ports = [p for p in ports if filter_v5_ports(p, ["0"], ["System", "Communications"])] + joystick_ports = [p for p in ports if filter_v5_ports(p, ["1"], ["Controller"])] # Fallback for when a brain port's location is not detected properly if len(user_ports) != len(system_ports): @@ -65,31 +72,31 @@ def filter_v5_ports_mac(p, device): user_ports = [p for p in ports if p not in system_ports and p not in joystick_ports] if len(user_ports) == len(system_ports) and len(user_ports) > 0: - if p_type.lower() == 'user': + if p_type.lower() == "user": return user_ports - elif p_type.lower() == 'system': + elif p_type.lower() == "system": return system_ports + joystick_ports else: - raise ValueError(f'Invalid port type specified: {p_type}') + raise ValueError(f"Invalid port type specified: {p_type}") # None of the typical filters worked, so if there are only two ports, then the lower one is always* # the USER? port (*always = I haven't found a guarantee) if len(ports) == 2: # natural sort based on: https://stackoverflow.com/a/16090640 def natural_key(chunk: str): - return [int(text) if text.isdigit() else text.lower() for text in re.split(r'(\d+)', chunk)] + return [int(text) if text.isdigit() else text.lower() for text in re.split(r"(\d+)", chunk)] ports = sorted(ports, key=lambda p: natural_key(p.device)) - if p_type.lower() == 'user': + if p_type.lower() == "user": return [ports[1]] - elif p_type.lower() == 'system': + elif p_type.lower() == "system": # check if ports contain the word Brain in the description and return that port for port in ports: if "Brain" in port.description: return [port] return [ports[0], *joystick_ports] else: - raise ValueError(f'Invalid port type specified: {p_type}') + raise ValueError(f"Invalid port type specified: {p_type}") # these can now also be used as user ports if len(joystick_ports) > 0: # and p_type.lower() == 'system': return joystick_ports @@ -109,10 +116,10 @@ def wrapped(device, *args, **kwargs): return wrapped -def compress_file(file: BinaryIO, file_len: int, label='Compressing binary') -> Tuple[BinaryIO, int]: +def compress_file(file: BinaryIO, file_len: int, label="Compressing binary") -> Tuple[BinaryIO, int]: buf = io.BytesIO() with ui.progressbar(length=file_len, label=label) as progress: - with gzip.GzipFile(fileobj=buf, mode='wb', mtime=0) as f: + with gzip.GzipFile(fileobj=buf, mode="wb", mtime=0) as f: while True: data = file.read(16 * 1024) if not data: @@ -126,8 +133,8 @@ def compress_file(file: BinaryIO, file_len: int, label='Compressing binary') -> class V5Device(VEXDevice, SystemDevice): - vid_map = {'user': 1, 'system': 15, 'rms': 16, 'pros': 24, 'mw': 32} # type: Dict[str, int] - channel_map = {'pit': 0, 'download': 1} 
# type: Dict[str, int] + vid_map = {"user": 1, "system": 15, "rms": 16, "pros": 24, "mw": 32} # type: Dict[str, int] + channel_map = {"pit": 0, "download": 1} # type: Dict[str, int] class FTCompleteOptions(IntEnum): DONT_RUN = 0 @@ -152,21 +159,25 @@ class ControllerFlags(IntFlag): def __init__(self, data: tuple): from semantic_version import Version - self.system_version = Version('{}.{}.{}-{}.{}'.format(*data[0:5])) + + self.system_version = Version("{}.{}.{}-{}.{}".format(*data[0:5])) self.product = V5Device.SystemVersion.Product(data[5]) self.product_flags = self.flag_map[self.product](data[6]) def __str__(self): - return f'System Version: {self.system_version}\n' \ - f' Product: {self.product.name}\n' \ - f' Product Flags: {self.product_flags.value:x}' + return ( + f"System Version: {self.system_version}\n" + f" Product: {self.product.name}\n" + f" Product Flags: {self.product_flags.value:x}" + ) class SystemStatus(object): def __init__(self, data: tuple): from semantic_version import Version - self.system_version = Version('{}.{}.{}-{}'.format(*data[0:4])) - self.cpu0_version = Version('{}.{}.{}-{}'.format(*data[4:8])) - self.cpu1_version = Version('{}.{}.{}-{}'.format(*data[8:12])) + + self.system_version = Version("{}.{}.{}-{}".format(*data[0:4])) + self.cpu0_version = Version("{}.{}.{}-{}".format(*data[4:8])) + self.cpu1_version = Version("{}.{}.{}-{}".format(*data[8:12])) self.touch_version = data[12] self.system_id = data[13] @@ -175,11 +186,11 @@ def __getitem__(self, item): def __init__(self, port: BasePort): self._status = None - self._serial_cache = b'' + self._serial_cache = b"" super().__init__(port) class DownloadChannel(object): - def __init__(self, device: 'V5Device', timeout: float = 5.): + def __init__(self, device: "V5Device", timeout: float = 5.0): self.device = device self.timeout = timeout self.did_switch = False @@ -187,32 +198,34 @@ def __init__(self, device: 'V5Device', timeout: float = 5.): def __enter__(self): version = self.device.query_system_version() if version.product == V5Device.SystemVersion.Product.CONTROLLER: - self.device.default_timeout = 2. 
+ self.device.default_timeout = 2.0 if V5Device.SystemVersion.ControllerFlags.CONNECTED not in version.product_flags: - raise VEXCommError('V5 Controller doesn\'t appear to be connected to a V5 Brain', version) - ui.echo('Transferring V5 to download channel') - self.device.ft_transfer_channel('download') + raise VEXCommError("V5 Controller doesn't appear to be connected to a V5 Brain", version) + ui.echo("Transferring V5 to download channel") + self.device.ft_transfer_channel("download") self.did_switch = True - logger(__name__).debug('Sleeping for a while to let V5 start channel transfer') - time.sleep(.25) # wait at least 250ms before starting to poll controller if it's connected yet + logger(__name__).debug("Sleeping for a while to let V5 start channel transfer") + time.sleep(0.25) # wait at least 250ms before starting to poll controller if it's connected yet version = self.device.query_system_version() start_time = time.time() # ask controller every 250 ms if it's connected until it is - while V5Device.SystemVersion.ControllerFlags.CONNECTED not in version.product_flags and \ - time.time() - start_time < self.timeout: + while ( + V5Device.SystemVersion.ControllerFlags.CONNECTED not in version.product_flags + and time.time() - start_time < self.timeout + ): version = self.device.query_system_version() time.sleep(0.25) if V5Device.SystemVersion.ControllerFlags.CONNECTED not in version.product_flags: - raise VEXCommError('Could not transfer V5 Controller to download channel', version) - logger(__name__).info('V5 should been transferred to higher bandwidth download channel') + raise VEXCommError("Could not transfer V5 Controller to download channel", version) + logger(__name__).info("V5 should been transferred to higher bandwidth download channel") return self else: return self def __exit__(self, *exc): if self.did_switch: - self.device.ft_transfer_channel('pit') - ui.echo('V5 has been transferred back to pit channel') + self.device.ft_transfer_channel("pit") + ui.echo("V5 has been transferred back to pit channel") @property def status(self): @@ -222,144 +235,190 @@ def status(self): @property def can_compress(self): - return self.status['system_version'] in Spec('>=1.0.5') + return self.status["system_version"] in Spec(">=1.0.5") @property def is_wireless(self): version = self.query_system_version() - return version.product == V5Device.SystemVersion.Product.CONTROLLER and \ - V5Device.SystemVersion.ControllerFlags.CONNECTED in version.product_flags + return ( + version.product == V5Device.SystemVersion.Product.CONTROLLER + and V5Device.SystemVersion.ControllerFlags.CONNECTED in version.product_flags + ) def generate_cold_hash(self, project: Project, extra: dict): keys = {k: t.version for k, t in project.templates.items()} keys.update(extra) - from hashlib import md5 from base64 import b64encode - msg = str(sorted(keys, key=lambda t: t[0])).encode('ascii') - name = b64encode(md5(msg).digest()).rstrip(b'=').decode('ascii') - if Spec('<=1.0.0-27').match(self.status['cpu0_version']): + from hashlib import md5 + + msg = str(sorted(keys, key=lambda t: t[0])).encode("ascii") + name = b64encode(md5(msg).digest()).rstrip(b"=").decode("ascii") + if Spec("<=1.0.0-27").match(self.status["cpu0_version"]): # Bug prevents linked files from being > 18 characters long. 
# 17 characters is probably good enough for hash, so no need to fail out name = name[:17] return name def upload_project(self, project: Project, **kwargs): - assert project.target == 'v5' + assert project.target == "v5" monolith_path = project.location.joinpath(project.output) if monolith_path.exists(): - logger(__name__).debug(f'Monolith exists! ({monolith_path})') - if 'hot_output' in project.templates['kernel'].metadata and \ - 'cold_output' in project.templates['kernel'].metadata: - hot_path = project.location.joinpath(project.templates['kernel'].metadata['hot_output']) - cold_path = project.location.joinpath(project.templates['kernel'].metadata['cold_output']) + logger(__name__).debug(f"Monolith exists! ({monolith_path})") + if ( + "hot_output" in project.templates["kernel"].metadata + and "cold_output" in project.templates["kernel"].metadata + ): + hot_path = project.location.joinpath(project.templates["kernel"].metadata["hot_output"]) + cold_path = project.location.joinpath(project.templates["kernel"].metadata["cold_output"]) upload_hot_cold = False if hot_path.exists() and cold_path.exists(): - logger(__name__).debug(f'Hot and cold files exist! ({hot_path}; {cold_path})') + logger(__name__).debug(f"Hot and cold files exist! ({hot_path}; {cold_path})") if monolith_path.exists(): monolith_mtime = monolith_path.stat().st_mtime hot_mtime = hot_path.stat().st_mtime - logger(__name__).debug(f'Monolith last modified: {monolith_mtime}') - logger(__name__).debug(f'Hot last modified: {hot_mtime}') + logger(__name__).debug(f"Monolith last modified: {monolith_mtime}") + logger(__name__).debug(f"Hot last modified: {hot_mtime}") if hot_mtime > monolith_mtime: upload_hot_cold = True - logger(__name__).debug('Hot file is newer than monolith!') + logger(__name__).debug("Hot file is newer than monolith!") else: upload_hot_cold = True if upload_hot_cold: - with hot_path.open(mode='rb') as hot: - with cold_path.open(mode='rb') as cold: - kwargs['linked_file'] = cold - kwargs['linked_remote_name'] = self.generate_cold_hash(project, {}) - kwargs['linked_file_addr'] = int( - project.templates['kernel'].metadata.get('cold_addr', 0x03800000)) - kwargs['addr'] = int(project.templates['kernel'].metadata.get('hot_addr', 0x07800000)) + with hot_path.open(mode="rb") as hot: + with cold_path.open(mode="rb") as cold: + kwargs["linked_file"] = cold + kwargs["linked_remote_name"] = self.generate_cold_hash(project, {}) + kwargs["linked_file_addr"] = int( + project.templates["kernel"].metadata.get("cold_addr", 0x03800000) + ) + kwargs["addr"] = int(project.templates["kernel"].metadata.get("hot_addr", 0x07800000)) return self.write_program(hot, **kwargs) if not monolith_path.exists(): - raise ui.dont_send(Exception('No output files were found! Have you built your project?')) - with monolith_path.open(mode='rb') as pf: + raise ui.dont_send(Exception("No output files were found! 
Have you built your project?")) + with monolith_path.open(mode="rb") as pf: return self.write_program(pf, **kwargs) def generate_ini_file(self, remote_name: str = None, slot: int = 0, ini: ConfigParser = None, **kwargs): project_ini = ConfigParser() - from semantic_version import Spec - default_icon = 'USER902x.bmp' if Spec('>=1.0.0-22').match(self.status['cpu0_version']) else 'USER999x.bmp' - project_ini['project'] = { - 'version': str(kwargs.get('ide_version') or get_version()), - 'ide': str(kwargs.get('ide') or 'PROS') + default_icon = "USER902x.bmp" if Spec(">=1.0.0-22").match(self.status["cpu0_version"]) else "USER999x.bmp" + project_ini["project"] = { + "version": str(kwargs.get("ide_version") or get_version()), + "ide": str(kwargs.get("ide") or "PROS"), } - project_ini['program'] = { - 'version': kwargs.get('version', '0.0.0') or '0.0.0', - 'name': remote_name, - 'slot': slot, - 'icon': kwargs.get('icon', default_icon) or default_icon, - 'description': kwargs.get('description', 'Created with PROS'), - 'date': datetime.now().isoformat() + project_ini["program"] = { + "version": kwargs.get("version", "0.0.0") or "0.0.0", + "name": remote_name, + "slot": slot, + "icon": kwargs.get("icon", default_icon) or default_icon, + "description": kwargs.get("description", "Created with PROS"), + "date": datetime.now().isoformat(), } if ini: project_ini.update(ini) with StringIO() as ini_str: project_ini.write(ini_str) - logger(__name__).info(f'Created ini: {ini_str.getvalue()}') + logger(__name__).info(f"Created ini: {ini_str.getvalue()}") return ini_str.getvalue() @with_download_channel - def write_program(self, file: typing.BinaryIO, remote_name: str = None, ini: ConfigParser = None, slot: int = 0, - file_len: int = -1, run_after: FTCompleteOptions = FTCompleteOptions.DONT_RUN, - target: str = 'flash', quirk: int = 0, linked_file: Optional[typing.BinaryIO] = None, - linked_remote_name: Optional[str] = None, linked_file_addr: Optional[int] = None, - compress_bin: bool = True, **kwargs): + def write_program( + self, + file: typing.BinaryIO, + remote_name: str = None, + ini: ConfigParser = None, + slot: int = 0, + file_len: int = -1, + run_after: FTCompleteOptions = FTCompleteOptions.DONT_RUN, + target: str = "flash", + quirk: int = 0, + linked_file: Optional[typing.BinaryIO] = None, + linked_remote_name: Optional[str] = None, + linked_file_addr: Optional[int] = None, + compress_bin: bool = True, + **kwargs, + ): with ui.Notification(): action_string = f'Uploading program "{remote_name}"' finish_string = f'Finished uploading "{remote_name}"' - if hasattr(file, 'name'): - action_string += f' ({remote_name if remote_name else Path(file.name).name})' - finish_string += f' ({remote_name if remote_name else Path(file.name).name})' - action_string += f' to V5 slot {slot + 1} on {self.port}' + if hasattr(file, "name"): + action_string += f" ({remote_name if remote_name else Path(file.name).name})" + finish_string += f" ({remote_name if remote_name else Path(file.name).name})" + action_string += f" to V5 slot {slot + 1} on {self.port}" if compress_bin: - action_string += ' (compressed)' + action_string += " (compressed)" ui.echo(action_string) - remote_base = f'slot_{slot + 1}' - if target == 'ddr': - self.write_file(file, f'{remote_base}.bin', file_len=file_len, type='bin', - target='ddr', run_after=run_after, linked_filename=linked_remote_name, **kwargs) + remote_base = f"slot_{slot + 1}" + if target == "ddr": + self.write_file( + file, + f"{remote_base}.bin", + file_len=file_len, + type="bin", + 
target="ddr", + run_after=run_after, + linked_filename=linked_remote_name, + **kwargs, + ) return if not isinstance(ini, ConfigParser): ini = ConfigParser() if not remote_name: remote_name = file.name if len(remote_name) > 23: - logger(__name__).info('Truncating remote name to {} for length.'.format(remote_name[:20])) + logger(__name__).info("Truncating remote name to {} for length.".format(remote_name[:20])) remote_name = remote_name[:23] ini_file = self.generate_ini_file(remote_name=remote_name, slot=slot, ini=ini, **kwargs) - logger(__name__).info(f'Created ini: {ini_file}') + logger(__name__).info(f"Created ini: {ini_file}") if linked_file is not None: - self.upload_library(linked_file, remote_name=linked_remote_name, addr=linked_file_addr, - compress=compress_bin, force_upload=kwargs.pop('force_upload_linked', False)) - bin_kwargs = {k: v for k, v in kwargs.items() if v in ['addr']} - if (quirk & 0xff) == 1: + self.upload_library( + linked_file, + remote_name=linked_remote_name, + addr=linked_file_addr, + compress=compress_bin, + force_upload=kwargs.pop("force_upload_linked", False), + ) + bin_kwargs = {k: v for k, v in kwargs.items() if v in ["addr"]} + if (quirk & 0xFF) == 1: # WRITE BIN FILE - self.write_file(file, f'{remote_base}.bin', file_len=file_len, type='bin', run_after=run_after, - linked_filename=linked_remote_name, compress=compress_bin, **bin_kwargs, **kwargs) - with BytesIO(ini_file.encode(encoding='ascii')) as ini_bin: + self.write_file( + file, + f"{remote_base}.bin", + file_len=file_len, + type="bin", + run_after=run_after, + linked_filename=linked_remote_name, + compress=compress_bin, + **bin_kwargs, + **kwargs, + ) + with BytesIO(ini_file.encode(encoding="ascii")) as ini_bin: # WRITE INI FILE - self.write_file(ini_bin, f'{remote_base}.ini', type='ini', **kwargs) - elif (quirk & 0xff) == 0: + self.write_file(ini_bin, f"{remote_base}.ini", type="ini", **kwargs) + elif (quirk & 0xFF) == 0: # STOP PROGRAM - self.execute_program_file('', run=False) - with BytesIO(ini_file.encode(encoding='ascii')) as ini_bin: + self.execute_program_file("", run=False) + with BytesIO(ini_file.encode(encoding="ascii")) as ini_bin: # WRITE INI FILE - self.write_file(ini_bin, f'{remote_base}.ini', type='ini', **kwargs) + self.write_file(ini_bin, f"{remote_base}.ini", type="ini", **kwargs) # WRITE BIN FILE - self.write_file(file, f'{remote_base}.bin', file_len=file_len, type='bin', run_after=run_after, - linked_filename=linked_remote_name, compress=compress_bin, **bin_kwargs, **kwargs) + self.write_file( + file, + f"{remote_base}.bin", + file_len=file_len, + type="bin", + run_after=run_after, + linked_filename=linked_remote_name, + compress=compress_bin, + **bin_kwargs, + **kwargs, + ) else: - raise ValueError(f'Unknown quirk option: {quirk}') - ui.finalize('upload', f'{finish_string} to V5') + raise ValueError(f"Unknown quirk option: {quirk}") + ui.finalize("upload", f"{finish_string} to V5") - def ensure_library_space(self, name: Optional[str] = None, vid: int_str = None, - target_name: Optional[str] = None): + def ensure_library_space(self, name: Optional[str] = None, vid: int_str = None, target_name: Optional[str] = None): """ Uses algorithms, for loops, and if statements to determine what files should be removed @@ -379,11 +438,8 @@ def ensure_library_space(self, name: Optional[str] = None, vid: int_str = None, vid = self.vid_map[vid.lower()] # assume all libraries unused_libraries = [ - (vid, l['filename']) - for l - in [self.get_file_metadata_by_idx(i) - for i in range(0, 
self.get_dir_count(vid=vid)) - ] + (vid, l["filename"]) + for l in [self.get_file_metadata_by_idx(i) for i in range(0, self.get_dir_count(vid=vid))] ] if name is not None: if (vid, name) in unused_libraries: @@ -395,85 +451,97 @@ def ensure_library_space(self, name: Optional[str] = None, vid: int_str = None, programs: Dict[str, Dict] = { # need the linked file metadata, so we have to use the get_file_metadata_by_name command - p['filename']: self.get_file_metadata_by_name(p['filename'], vid='user') - for p - in [self.get_file_metadata_by_idx(i) - for i in range(0, self.get_dir_count(vid='user'))] - if p['type'] == 'bin' + p["filename"]: self.get_file_metadata_by_name(p["filename"], vid="user") + for p in [self.get_file_metadata_by_idx(i) for i in range(0, self.get_dir_count(vid="user"))] + if p["type"] == "bin" } library_usage: Dict[Tuple[int, str], List[str]] = defaultdict(list) for program_name, metadata in programs.items(): - library_usage[(metadata['linked_vid'], metadata['linked_filename'])].append(program_name) + library_usage[(metadata["linked_vid"], metadata["linked_filename"])].append(program_name) orphaned_files: List[Union[str, Tuple[int, str]]] = [] for link, program_names in library_usage.items(): linked_vid, linked_name = link if name is not None and linked_vid == vid and linked_name == name: - logger(__name__).debug(f'{program_names} will be removed because the library will be replaced') + logger(__name__).debug(f"{program_names} will be removed because the library will be replaced") orphaned_files.extend(program_names) elif linked_vid != 0: # linked_vid == 0 means there's no link. Can't be orphaned if there's no link if link in unused_libraries: # the library is being used - logger(__name__).debug(f'{link} is being used') + logger(__name__).debug(f"{link} is being used") unused_libraries.remove(link) used_libraries.append(link) else: try: self.get_file_metadata_by_name(linked_name, vid=linked_vid) - logger(__name__).debug(f'{link} exists') + logger(__name__).debug(f"{link} exists") used_libraries.extend(link) except VEXCommError as e: logger(__name__).debug(dont_send(e)) - logger(__name__).debug(f'{program_names} will be removed because {link} does not exist') + logger(__name__).debug(f"{program_names} will be removed because {link} does not exist") orphaned_files.extend(program_names) orphaned_files.extend(unused_libraries) if target_name is not None and target_name in orphaned_files: # the file will be overwritten anyway orphaned_files.remove(target_name) if len(orphaned_files) > 0: - logger(__name__).warning(f'Removing {len(orphaned_files)} orphaned file(s) ({orphaned_files})') + logger(__name__).warning(f"Removing {len(orphaned_files)} orphaned file(s) ({orphaned_files})") for file in orphaned_files: if isinstance(file, tuple): self.erase_file(file_name=file[1], vid=file[0]) else: - self.erase_file(file_name=file, erase_all=True, vid='user') + self.erase_file(file_name=file, erase_all=True, vid="user") if len(used_libraries) > 3: libraries = [ - (linked_vid, linked_name, self.get_file_metadata_by_name(linked_name, vid=linked_vid)['timestamp']) - for linked_vid, linked_name - in used_libraries + (linked_vid, linked_name, self.get_file_metadata_by_name(linked_name, vid=linked_vid)["timestamp"]) + for linked_vid, linked_name in used_libraries ] - library_usage_timestamps = sorted([ - ( - linked_vid, - linked_name, - # get the most recent timestamp of the library and all files linking to it - max(linked_timestamp, *[programs[p]['timestamp'] for p in 
library_usage[(linked_vid, linked_name)]]) - ) - for linked_vid, linked_name, linked_timestamp - in libraries - ], key=lambda t: t[2]) + library_usage_timestamps = sorted( + [ + ( + linked_vid, + linked_name, + # get the most recent timestamp of the library and all files linking to it + max( + linked_timestamp, + *[programs[p]["timestamp"] for p in library_usage[(linked_vid, linked_name)]], + ), + ) + for linked_vid, linked_name, linked_timestamp in libraries + ], + key=lambda t: t[2], + ) evicted_files: List[Union[str, Tuple[int, str]]] = [] - evicted_file_list = '' + evicted_file_list = "" for evicted_library in library_usage_timestamps[:3]: evicted_files.append(evicted_library[0:2]) evicted_files.extend(library_usage[evicted_library[0:2]]) - evicted_file_list += evicted_library[1] + ', ' - evicted_file_list += ', '.join(library_usage[evicted_file_list[0:2]]) + evicted_file_list += evicted_library[1] + ", " + evicted_file_list += ", ".join(library_usage[evicted_file_list[0:2]]) evicted_file_list = evicted_file_list[:2] # remove last ", " assert len(evicted_files) > 0 - if confirm(f'There are too many files on the V5. PROS can remove the following suggested old files: ' - f'{evicted_file_list}', - title='Confirm file eviction plan:'): + if confirm( + f"There are too many files on the V5. PROS can remove the following suggested old files: " + f"{evicted_file_list}", + title="Confirm file eviction plan:", + ): for file in evicted_files: if isinstance(file, tuple): self.erase_file(file_name=file[1], vid=file[0]) else: - self.erase_file(file_name=file, erase_all=True, vid='user') - - def upload_library(self, file: typing.BinaryIO, remote_name: str = None, file_len: int = -1, vid: int_str = 'pros', - force_upload: bool = False, compress: bool = True, **kwargs): + self.erase_file(file_name=file, erase_all=True, vid="user") + + def upload_library( + self, + file: typing.BinaryIO, + remote_name: str = None, + file_len: int = -1, + vid: int_str = "pros", + force_upload: bool = False, + compress: bool = True, + **kwargs, + ): """ Upload a file used for linking. Contains the logic to check if the file is already present in the filesystem and to prompt the user if we need to evict a library (and user programs). 
@@ -483,7 +551,7 @@ def upload_library(self, file: typing.BinaryIO, remote_name: str = None, file_le
         if not remote_name:
             remote_name = file.name
         if len(remote_name) > 23:
-            logger(__name__).info('Truncating remote name to {} for length.'.format(remote_name[:23]))
+            logger(__name__).info("Truncating remote name to {} for length.".format(remote_name[:23]))
             remote_name = remote_name[:23]
 
         if file_len < 0:
@@ -491,7 +559,7 @@ def upload_library(self, file: typing.BinaryIO, remote_name: str = None, file_le
             file.seek(0, 0)
 
         if compress and self.can_compress:
-            file, file_len = compress_file(file, file_len, label='Compressing library')
+            file, file_len = compress_file(file, file_len, label="Compressing library")
 
         crc32 = self.VEX_CRC32.compute(file.read(file_len))
         file.seek(0, 0)
@@ -500,90 +568,118 @@ def upload_library(self, file: typing.BinaryIO, remote_name: str = None, file_le
             try:
                 response = self.get_file_metadata_by_name(remote_name, vid)
                 logger(__name__).debug(response)
-                logger(__name__).debug({'file len': file_len, 'crc': crc32})
-                if response['size'] == file_len and response['crc'] == crc32:
-                    ui.echo('Library is already onboard V5')
+                logger(__name__).debug({"file len": file_len, "crc": crc32})
+                if response["size"] == file_len and response["crc"] == crc32:
+                    ui.echo("Library is already onboard V5")
                     return
                 else:
-                    logger(__name__).warning(f'Library onboard doesn\'t match! '
-                                             f'Length was {response["size"]} but expected {file_len} '
-                                             f'CRC: was {response["crc"]:x} but expected {crc32:x}')
+                    logger(__name__).warning(
+                        f"Library onboard doesn't match! "
+                        f'Length was {response["size"]} but expected {file_len} '
+                        f'CRC: was {response["crc"]:x} but expected {crc32:x}'
+                    )
             except VEXCommError as e:
                 logger(__name__).debug(e)
         else:
-            logger(__name__).info('Skipping already-uploaded checks')
+            logger(__name__).info("Skipping already-uploaded checks")
 
-        logger(__name__).debug('Going to worry about uploading the file now')
-        self.ensure_library_space(remote_name, vid, )
+        logger(__name__).debug("Going to worry about uploading the file now")
+        self.ensure_library_space(
+            remote_name,
+            vid,
+        )
         self.write_file(file, remote_name, file_len, vid=vid, **kwargs)
 
-    def read_file(self, file: typing.IO[bytes], remote_file: str, vid: int_str = 'user', target: int_str = 'flash',
-                  addr: Optional[int] = None, file_len: Optional[int] = None):
+    def read_file(
+        self,
+        file: typing.IO[bytes],
+        remote_file: str,
+        vid: int_str = "user",
+        target: int_str = "flash",
+        addr: Optional[int] = None,
+        file_len: Optional[int] = None,
+    ):
         if isinstance(vid, str):
             vid = self.vid_map[vid.lower()]
         if addr is None:
             metadata = self.get_file_metadata_by_name(remote_file, vid=vid)
-            addr = metadata['addr']
+            addr = metadata["addr"]
         wireless = self.is_wireless
-        ft_meta = self.ft_initialize(remote_file, function='download', vid=vid, target=target, addr=addr)
+        ft_meta = self.ft_initialize(remote_file, function="download", vid=vid, target=target, addr=addr)
         if file_len is None:
-            file_len = ft_meta['file_size']
+            file_len = ft_meta["file_size"]
         if wireless and file_len > 0x25000:
-            confirm(f'You\'re about to download {file_len} bytes wirelessly. This could take some time, and you should '
-                    f'consider downloading directly with a wire.', abort=True, default=False)
-
-        max_packet_size = ft_meta['max_packet_size']
-        with ui.progressbar(length=file_len, label='Downloading {}'.format(remote_file)) as progress:
+            confirm(
+                f"You're about to download {file_len} bytes wirelessly. 
This could take some time, and you should " + f"consider downloading directly with a wire.", + abort=True, + default=False, + ) + + max_packet_size = ft_meta["max_packet_size"] + with ui.progressbar(length=file_len, label="Downloading {}".format(remote_file)) as progress: for i in range(0, file_len, max_packet_size): packet_size = max_packet_size if i + max_packet_size > file_len: packet_size = file_len - i file.write(self.ft_read(addr + i, packet_size)) progress.update(packet_size) - logger(__name__).debug('Completed {} of {} bytes'.format(i + packet_size, file_len)) + logger(__name__).debug("Completed {} of {} bytes".format(i + packet_size, file_len)) self.ft_complete() - def write_file(self, file: typing.BinaryIO, remote_file: str, file_len: int = -1, - run_after: FTCompleteOptions = FTCompleteOptions.DONT_RUN, linked_filename: Optional[str] = None, - linked_vid: int_str = 'pros', compress: bool = False, **kwargs): + def write_file( + self, + file: typing.BinaryIO, + remote_file: str, + file_len: int = -1, + run_after: FTCompleteOptions = FTCompleteOptions.DONT_RUN, + linked_filename: Optional[str] = None, + linked_vid: int_str = "pros", + compress: bool = False, + **kwargs, + ): if file_len < 0: file_len = file.seek(0, 2) file.seek(0, 0) display_name = remote_file - if hasattr(file, 'name'): - display_name = f'{remote_file} ({Path(file.name).name})' + if hasattr(file, "name"): + display_name = f"{remote_file} ({Path(file.name).name})" if compress and self.can_compress: file, file_len = compress_file(file, file_len) if self.is_wireless and file_len > 0x25000: - confirm(f'You\'re about to upload {file_len} bytes wirelessly. This could take some time, and you should ' - f'consider uploading directly with a wire.', abort=True, default=False) + confirm( + f"You're about to upload {file_len} bytes wirelessly. 
This could take some time, and you should " + f"consider uploading directly with a wire.", + abort=True, + default=False, + ) crc32 = self.VEX_CRC32.compute(file.read(file_len)) file.seek(0, 0) - addr = kwargs.get('addr', 0x03800000) - logger(__name__).info('Transferring {} ({} bytes) to the V5 from {}'.format(remote_file, file_len, file)) - ft_meta = self.ft_initialize(remote_file, function='upload', length=file_len, crc=crc32, **kwargs) + addr = kwargs.get("addr", 0x03800000) + logger(__name__).info("Transferring {} ({} bytes) to the V5 from {}".format(remote_file, file_len, file)) + ft_meta = self.ft_initialize(remote_file, function="upload", length=file_len, crc=crc32, **kwargs) if linked_filename is not None: - logger(__name__).debug('Setting file link') + logger(__name__).debug("Setting file link") self.ft_set_link(linked_filename, vid=linked_vid) - assert ft_meta['file_size'] >= file_len + assert ft_meta["file_size"] >= file_len if len(remote_file) > 24: - logger(__name__).info('Truncating {} to {} due to length'.format(remote_file, remote_file[:24])) + logger(__name__).info("Truncating {} to {} due to length".format(remote_file, remote_file[:24])) remote_file = remote_file[:24] - max_packet_size = int(ft_meta['max_packet_size'] / 2) - with ui.progressbar(length=file_len, label='Uploading {}'.format(display_name)) as progress: + max_packet_size = int(ft_meta["max_packet_size"] / 2) + with ui.progressbar(length=file_len, label="Uploading {}".format(display_name)) as progress: for i in range(0, file_len, max_packet_size): packet_size = max_packet_size if i + max_packet_size > file_len: packet_size = file_len - i - logger(__name__).debug('Writing {} bytes at 0x{:02X}'.format(packet_size, addr + i)) + logger(__name__).debug("Writing {} bytes at 0x{:02X}".format(packet_size, addr + i)) self.ft_write(addr + i, file.read(packet_size)) progress.update(packet_size) - logger(__name__).debug('Completed {} of {} bytes'.format(i + packet_size, file_len)) - logger(__name__).debug('Data transfer complete, sending ft complete') - if compress and self.status['system_version'] in Spec('>=1.0.5'): - logger(__name__).info('Closing gzip file') + logger(__name__).debug("Completed {} of {} bytes".format(i + packet_size, file_len)) + logger(__name__).debug("Data transfer complete, sending ft complete") + if compress and self.status["system_version"] in Spec(">=1.0.5"): + logger(__name__).info("Closing gzip file") file.close() self.ft_complete(options=run_after) @@ -594,18 +690,18 @@ def capture_screen(self) -> Tuple[List[List[int]], int, int]: file_size = width * height * 4 # ARGB rx_io = BytesIO() - self.read_file(rx_io, '', vid='system', target='screen', addr=0, file_len=file_size) + self.read_file(rx_io, "", vid="system", target="screen", addr=0, file_len=file_size) rx = rx_io.getvalue() - rx = struct.unpack('<{}I'.format(len(rx) // 4), rx) + rx = struct.unpack("<{}I".format(len(rx) // 4), rx) data = [[] for _ in range(height)] for y in range(height): for x in range(width - 1): if x < 480: px = rx[y * width + x] - data[y].append((px & 0xff0000) >> 16) - data[y].append((px & 0x00ff00) >> 8) - data[y].append(px & 0x0000ff) + data[y].append((px & 0xFF0000) >> 16) + data[y].append((px & 0x00FF00) >> 8) + data[y].append(px & 0x0000FF) return data, 480, height @@ -613,8 +709,8 @@ def used_slots(self) -> Dict[int, Optional[str]]: with ui.Notification(): rv = {} for slot in range(1, 9): - ini = self.read_ini(f'slot_{slot}.ini') - rv[slot] = ini['program']['name'] if ini is not None else None + ini = 
self.read_ini(f"slot_{slot}.ini") + rv[slot] = ini["program"]["name"] if ini is not None else None return rv def read_ini(self, remote_name: str) -> Optional[ConfigParser]: @@ -623,85 +719,96 @@ def read_ini(self, remote_name: str) -> Optional[ConfigParser]: self.read_file(rx_io, remote_name) config = ConfigParser() rx_io.seek(0, 0) - config.read_string(rx_io.read().decode('ascii')) + config.read_string(rx_io.read().decode("ascii")) return config - except VEXCommError as e: + except VEXCommError: return None @retries def query_system_version(self) -> SystemVersion: - logger(__name__).debug('Sending simple 0xA408 command') - ret = self._txrx_simple_struct(0xA4, '>8B') - logger(__name__).debug('Completed simple 0xA408 command') + logger(__name__).debug("Sending simple 0xA408 command") + ret = self._txrx_simple_struct(0xA4, ">8B") + logger(__name__).debug("Completed simple 0xA408 command") return V5Device.SystemVersion(ret) @retries def ft_transfer_channel(self, channel: int_str): - logger(__name__).debug(f'Transferring to {channel} channel') - logger(__name__).debug('Sending ext 0x10 command') + logger(__name__).debug(f"Transferring to {channel} channel") + logger(__name__).debug("Sending ext 0x10 command") if isinstance(channel, str): channel = self.channel_map[channel] assert isinstance(channel, int) and 0 <= channel <= 1 - self._txrx_ext_packet(0x10, struct.pack('<2B', 1, channel), rx_length=0) - logger(__name__).debug('Completed ext 0x10 command') + self._txrx_ext_packet(0x10, struct.pack("<2B", 1, channel), rx_length=0) + logger(__name__).debug("Completed ext 0x10 command") @retries def ft_initialize(self, file_name: str, **kwargs) -> Dict[str, Any]: - logger(__name__).debug('Sending ext 0x11 command') + logger(__name__).debug("Sending ext 0x11 command") options = { - 'function': 'upload', - 'target': 'flash', - 'vid': 'user', - 'overwrite': True, - 'options': 0, - 'length': 0, - 'addr': 0x03800000, - 'crc': 0, - 'type': 'bin', - 'timestamp': datetime.now(), - 'version': 0x01_00_00_00, - 'name': file_name + "function": "upload", + "target": "flash", + "vid": "user", + "overwrite": True, + "options": 0, + "length": 0, + "addr": 0x03800000, + "crc": 0, + "type": "bin", + "timestamp": datetime.now(), + "version": 0x01_00_00_00, + "name": file_name, } options.update({k: v for k, v in kwargs.items() if k in options and v is not None}) - if isinstance(options['function'], str): - options['function'] = {'upload': 1, 'download': 2}[options['function'].lower()] - if isinstance(options['target'], str): - options['target'] = {'ddr': 0, 'flash': 1, 'screen': 2}[options['target'].lower()] - if isinstance(options['vid'], str): - options['vid'] = self.vid_map[options['vid'].lower()] - if isinstance(options['type'], str): - options['type'] = options['type'].encode(encoding='ascii') - if isinstance(options['name'], str): - options['name'] = options['name'].encode(encoding='ascii') - options['options'] |= 1 if options['overwrite'] else 0 - options['timestamp'] = int((options['timestamp'] - datetime(2000, 1, 1)).total_seconds()) - - logger(__name__).debug('Initializing file transfer w/: {}'.format(options)) - tx_payload = struct.pack("<4B3I4s2I24s", options['function'], options['target'], options['vid'], - options['options'], options['length'], options['addr'], options['crc'], - options['type'], options['timestamp'], options['version'], options['name']) - rx = self._txrx_ext_struct(0x11, tx_payload, " bytearray: - logger(__name__).debug('Sending ext 0x14 command') + logger(__name__).debug("Sending ext 
0x14 command") actual_n_bytes = n_bytes + (0 if n_bytes % 4 == 0 else 4 - n_bytes % 4) ui.logger(__name__).debug(dict(actual_n_bytes=actual_n_bytes, addr=addr)) tx_payload = struct.pack(" int: - logger(__name__).debug('Sending ext 0x16 command') + def get_dir_count(self, vid: int_str = 1, options: int = 0) -> int: + logger(__name__).debug("Sending ext 0x16 command") if isinstance(vid, str): vid = self.vid_map[vid.lower()] tx_payload = struct.pack("<2B", vid, options) ret = self._txrx_ext_struct(0x16, tx_payload, " Dict[str, Any]: - logger(__name__).debug('Sending ext 0x17 command') + def get_file_metadata_by_idx(self, file_idx: int, options: int = 0) -> Dict[str, Any]: + logger(__name__).debug("Sending ext 0x17 command") tx_payload = struct.pack("<2B", file_idx, options) rx = self._txrx_ext_struct(0x17, tx_payload, " Dict[str, Any]: - logger(__name__).debug('Sending ext 0x19 command') + def get_file_metadata_by_name(self, file_name: str, vid: int_str = 1, options: int = 0) -> Dict[str, Any]: + logger(__name__).debug("Sending ext 0x19 command") if isinstance(vid, str): vid = self.vid_map[vid.lower()] - ui.logger(__name__).debug(f'Options: {dict(vid=vid, file_name=file_name)}') - tx_payload = struct.pack("<2B24s", vid, options, file_name.encode(encoding='ascii')) + ui.logger(__name__).debug(f"Options: {dict(vid=vid, file_name=file_name)}") + tx_payload = struct.pack("<2B24s", vid, options, file_name.encode(encoding="ascii")) rx = self._txrx_ext_struct(0x19, tx_payload, " Dict[str, Any]: - logger(__name__).debug('Sending ext 0x1C command') - tx_payload = struct.pack("<2B24s", vid, options, file_name.encode(encoding='ascii')) + def get_program_file_slot(self, file_name: str, vid: int = 1, options: int = 0) -> Dict[str, Any]: + logger(__name__).debug("Sending ext 0x1C command") + tx_payload = struct.pack("<2B24s", vid, options, file_name.encode(encoding="ascii")) ret = self._txrx_ext_struct(0x1C, tx_payload, " SystemStatus: from semantic_version import Version - logger(__name__).debug('Sending ext 0x22 command') + + logger(__name__).debug("Sending ext 0x22 command") version = self.query_system_version() - if (version.product == V5Device.SystemVersion.Product.BRAIN and version.system_version in Spec('<1.0.13')) or \ - (version.product == V5Device.SystemVersion.Product.CONTROLLER and version.system_version in Spec('<1.0.0-0.70')): - schema = ' bytes: # read/write are the same command, behavior dictated by specifying # length-to-read as 0xFF and providing additional payload bytes to write or # specifying a length-to-read and no additional data to read. - logger(__name__).debug('Sending ext 0x27 command (read)') + logger(__name__).debug("Sending ext 0x27 command (read)") # specifying a length to read (0x40 bytes) with no additional payload data. - tx_payload = struct.pack("<2B", self.channel_map['download'], 0x40) + tx_payload = struct.pack("<2B", self.channel_map["download"], 0x40) # RX length isn't always 0x40 (end of buffer reached), so don't check_length. self._serial_cache += self._txrx_ext_packet(0x27, tx_payload, 0, check_length=False)[1:] - logger(__name__).debug('Completed ext 0x27 command (read)') + logger(__name__).debug("Completed ext 0x27 command (read)") # if _serial_cache doesn't have a \x00, pretend we didn't read anything. - if b'\x00' not in self._serial_cache: - return b'' + if b"\x00" not in self._serial_cache: + return b"" # _serial_cache has a \x00, split off the beginning part and hand it down. 
- parts = self._serial_cache.split(b'\x00') - ret = parts[0] + b'\x00' - self._serial_cache = b'\x00'.join(parts[1:]) + parts = self._serial_cache.split(b"\x00") + ret = parts[0] + b"\x00" + self._serial_cache = b"\x00".join(parts[1:]) return ret @@ -884,16 +997,16 @@ def user_fifo_read(self) -> bytes: def user_fifo_write(self, payload: Union[Iterable, bytes, bytearray, str]): # Not currently implemented return - logger(__name__).debug('Sending ext 0x27 command (write)') + logger(__name__).debug("Sending ext 0x27 command (write)") max_packet_size = 224 pl_len = len(payload) for i in range(0, pl_len, max_packet_size): packet_size = max_packet_size if i + max_packet_size > pl_len: packet_size = pl_len - i - logger(__name__).debug(f'Writing {packet_size} bytes to user FIFO') - self._txrx_ext_packet(0x27, b'\x01\x00' + payload[i:packet_size], 0, check_length=False)[1:] - logger(__name__).debug('Completed ext 0x27 command (write)') + logger(__name__).debug(f"Writing {packet_size} bytes to user FIFO") + self._txrx_ext_packet(0x27, b"\x01\x00" + payload[i:packet_size], 0, check_length=False)[1:] + logger(__name__).debug("Completed ext 0x27 command (write)") @retries def sc_init(self) -> None: @@ -901,43 +1014,46 @@ def sc_init(self) -> None: Send command to initialize screen capture """ # This will only copy data in memory, not send! - logger(__name__).debug('Sending ext 0x28 command') - self._txrx_ext_struct(0x28, [], '') - logger(__name__).debug('Completed ext 0x28 command') + logger(__name__).debug("Sending ext 0x28 command") + self._txrx_ext_struct(0x28, [], "") + logger(__name__).debug("Completed ext 0x28 command") @retries def kv_read(self, kv: str) -> bytearray: - logger(__name__).debug('Sending ext 0x2e command') - encoded_kv = f'{kv}\0'.encode(encoding='ascii') - tx_payload = struct.pack(f'<{len(encoded_kv)}s', encoded_kv) + logger(__name__).debug("Sending ext 0x2e command") + encoded_kv = f"{kv}\0".encode(encoding="ascii") + tx_payload = struct.pack(f"<{len(encoded_kv)}s", encoded_kv) # Because the length of the kernel variables is not known, use None to indicate we are recieving an unknown length. - ret = self._txrx_ext_packet(0x2e, tx_payload, 1, check_length=False, check_ack=True) - logger(__name__).debug('Completed ext 0x2e command') + ret = self._txrx_ext_packet(0x2E, tx_payload, 1, check_length=False, check_ack=True) + logger(__name__).debug("Completed ext 0x2e command") return ret @retries def kv_write(self, kv: str, payload: Union[Iterable, bytes, bytearray, str]): - logger(__name__).debug('Sending ext 0x2f command') - encoded_kv = f'{kv}\0'.encode(encoding='ascii') - kv_to_max_bytes = { - 'teamnumber': 7, - 'robotname': 16 - } + logger(__name__).debug("Sending ext 0x2f command") + encoded_kv = f"{kv}\0".encode(encoding="ascii") + kv_to_max_bytes = {"teamnumber": 7, "robotname": 16} if len(payload) > kv_to_max_bytes.get(kv, 254): - print(f'Truncating input to meet maximum value length ({kv_to_max_bytes[kv]} characters).') + print(f"Truncating input to meet maximum value length ({kv_to_max_bytes[kv]} characters).") # Trim down size of payload to fit within the 255 byte limit and add null terminator. 
- payload = payload[:kv_to_max_bytes.get(kv, 254)] + "\0" + payload = payload[: kv_to_max_bytes.get(kv, 254)] + "\0" if isinstance(payload, str): - payload = payload.encode(encoding='ascii') - tx_fmt = f'<{len(encoded_kv)}s{len(payload)}s' + payload = payload.encode(encoding="ascii") + tx_fmt = f"<{len(encoded_kv)}s{len(payload)}s" tx_payload = struct.pack(tx_fmt, encoded_kv, payload) - ret = self._txrx_ext_packet(0x2f, tx_payload, 1, check_length=False, check_ack=True) - logger(__name__).debug('Completed ext 0x2f command') + self._txrx_ext_packet(0x2F, tx_payload, 1, check_length=False, check_ack=True) + logger(__name__).debug("Completed ext 0x2f command") return payload - def _txrx_ext_struct(self, command: int, tx_data: Union[Iterable, bytes, bytearray], - unpack_fmt: str, check_length: bool = True, check_ack: bool = True, - timeout: Optional[float] = None) -> Tuple: + def _txrx_ext_struct( + self, + command: int, + tx_data: Union[Iterable, bytes, bytearray], + unpack_fmt: str, + check_length: bool = True, + check_ack: bool = True, + timeout: Optional[float] = None, + ) -> Tuple: """ Transmits and receives an extended command to the V5, automatically unpacking the values according to unpack_fmt which gets passed into struct.unpack. The size of the payload is determined from the fmt string @@ -949,14 +1065,21 @@ def _txrx_ext_struct(self, command: int, tx_data: Union[Iterable, bytes, bytearr :param check_ack: If true, then checks the first byte of the extended payload as an AK byte :return: A tuple unpacked according to the unpack_fmt """ - rx = self._txrx_ext_packet(command, tx_data, struct.calcsize(unpack_fmt), - check_length=check_length, check_ack=check_ack, timeout=timeout) - logger(__name__).debug('Unpacking with format: {}'.format(unpack_fmt)) + rx = self._txrx_ext_packet( + command, + tx_data, + struct.calcsize(unpack_fmt), + check_length=check_length, + check_ack=check_ack, + timeout=timeout, + ) + logger(__name__).debug("Unpacking with format: {}".format(unpack_fmt)) return struct.unpack(unpack_fmt, rx) @classmethod - def _rx_ext_packet(cls, msg: Message, command: int, rx_length: int, check_ack: bool = True, - check_length: bool = True) -> Message: + def _rx_ext_packet( + cls, msg: Message, command: int, rx_length: int, check_ack: bool = True, check_length: bool = True + ) -> Message: """ Parse a received packet :param msg: data to parse @@ -966,11 +1089,11 @@ def _rx_ext_packet(cls, msg: Message, command: int, rx_length: int, check_ack: b :param tx_payload: what was sent, used if an exception needs to be thrown :return: The payload of the extended message """ - assert (msg['command'] == 0x56) + assert msg["command"] == 0x56 if not cls.VEX_CRC16.compute(msg.rx) == 0: raise VEXCommError("CRC of message didn't match 0: {}".format(cls.VEX_CRC16.compute(msg.rx)), msg) - assert (msg['payload'][0] == command) - msg = msg['payload'][1:-2] + assert msg["payload"][0] == command + msg = msg["payload"][1:-2] if check_ack: nacks = { 0xFF: "General NACK", @@ -986,7 +1109,7 @@ def _rx_ext_packet(cls, msg: Message, command: int, rx_length: int, check_ack: b 0xD8: "Data downloaded does not match initial length", 0xD9: "Directory entry does not exist", 0xDA: "Max user files, no more room for another user program", - 0xDB: "User file exists" + 0xDB: "User file exists", } if msg[0] in nacks.keys(): raise VEXCommError("Device NACK'd with reason: {}".format(nacks[msg[0]]), msg) @@ -994,17 +1117,24 @@ def _rx_ext_packet(cls, msg: Message, command: int, rx_length: int, check_ack: b raise 
VEXCommError("Device didn't ACK", msg) msg = msg[1:] if len(msg) > 0: - logger(cls).debug('Set msg window to {}'.format(bytes_to_str(msg))) + logger(cls).debug("Set msg window to {}".format(bytes_to_str(msg))) if len(msg) < rx_length and check_length: - raise VEXCommError(f'Received length is less than {rx_length} (got {len(msg)}).', msg) + raise VEXCommError(f"Received length is less than {rx_length} (got {len(msg)}).", msg) elif len(msg) > rx_length and check_length: ui.echo( - f'WARNING: Recieved length is more than {rx_length} (got {len(msg)}). Consider upgrading the PROS (CLI Version: {get_version()}).') + f"WARNING: Recieved length is more than {rx_length} (got {len(msg)}). Consider upgrading the PROS (CLI Version: {get_version()})." + ) return msg - def _txrx_ext_packet(self, command: int, tx_data: Union[Iterable, bytes, bytearray], - rx_length: int, check_length: bool = True, - check_ack: bool = True, timeout: Optional[float] = None) -> Message: + def _txrx_ext_packet( + self, + command: int, + tx_data: Union[Iterable, bytes, bytearray], + rx_length: int, + check_length: bool = True, + check_ack: bool = True, + timeout: Optional[float] = None, + ) -> Message: """ Transmits and receives an extended command to the V5. :param command: Extended command code @@ -1024,13 +1154,13 @@ def _form_extended_payload(cls, msg: int, payload: Union[Iterable, bytes, bytear if payload is None: payload = bytearray() payload_length = len(payload) - assert payload_length <= 0x7f_ff + assert payload_length <= 0x7F_FF if payload_length >= 0x80: - payload_length = [(payload_length >> 8) | 0x80, payload_length & 0xff] + payload_length = [(payload_length >> 8) | 0x80, payload_length & 0xFF] else: payload_length = [payload_length] packet = bytearray([msg, *payload_length, *payload]) crc = cls.VEX_CRC16.compute(bytes([*cls._form_simple_packet(0x56), *packet])) - packet = bytearray([*packet, crc >> 8, crc & 0xff]) - assert (cls.VEX_CRC16.compute(bytes([*cls._form_simple_packet(0x56), *packet])) == 0) + packet = bytearray([*packet, crc >> 8, crc & 0xFF]) + assert cls.VEX_CRC16.compute(bytes([*cls._form_simple_packet(0x56), *packet])) == 0 return packet diff --git a/pros/serial/devices/vex/v5_user_device.py b/pros/serial/devices/vex/v5_user_device.py index be40d6b4..f6d88c9e 100644 --- a/pros/serial/devices/vex/v5_user_device.py +++ b/pros/serial/devices/vex/v5_user_device.py @@ -1,6 +1,7 @@ from typing import * from cobs import cobs + from pros.common.utils import logger from pros.serial.devices.stream_device import StreamDevice from pros.serial.ports import BasePort @@ -29,21 +30,21 @@ def promiscuous(self, value: bool): def write(self, data: Union[str, bytes]): if isinstance(data, str): - data = data.encode(encoding='ascii') + data = data.encode(encoding="ascii") self.port.write(data) def read(self) -> Tuple[bytes, bytes]: msg = None, None while msg[0] is None or (msg[0] not in self.topics and not self._accept_all): - while b'\0' not in self.buffer: + while b"\0" not in self.buffer: self.buffer.extend(self.port.read(1)) self.buffer.extend(self.port.read(-1)) - assert b'\0' in self.buffer - msg, self.buffer = self.buffer.split(b'\0', 1) + assert b"\0" in self.buffer + msg, self.buffer = self.buffer.split(b"\0", 1) try: msg = cobs.decode(msg) except cobs.DecodeError: - logger(__name__).warning(f'Could not decode bytes: {msg.hex()}') + logger(__name__).warning(f"Could not decode bytes: {msg.hex()}") assert len(msg) >= 4 msg = bytes(msg[:4]), bytes(msg[4:]) return msg diff --git 
a/pros/serial/devices/vex/vex_device.py b/pros/serial/devices/vex/vex_device.py index ff9862d4..261415e6 100644 --- a/pros/serial/devices/vex/vex_device.py +++ b/pros/serial/devices/vex/vex_device.py @@ -5,9 +5,10 @@ from pros.common import * from pros.serial import bytes_to_str from pros.serial.ports import BasePort + +from ..generic_device import GenericDevice from . import comm_error from .message import Message -from ..generic_device import GenericDevice def debug(msg): @@ -28,7 +29,7 @@ def query_system(self) -> bytearray: Verify that a VEX device is connected. Returned payload varies by product :return: Payload response """ - logger(__name__).debug('Sending simple 0x21 command') + logger(__name__).debug("Sending simple 0x21 command") return self._txrx_simple_packet(0x21, 0x0A) def _txrx_simple_struct(self, command: int, unpack_fmt: str, timeout: Optional[float] = None) -> Tuple: @@ -45,11 +46,11 @@ def _txrx_simple_packet(self, command: int, rx_len: int, timeout: Optional[float :return: They payload of the message, or raises and exception if there was an issue """ msg = self._txrx_packet(command, timeout=timeout) - if msg['command'] != command: - raise comm_error.VEXCommError('Received command does not match sent command.', msg) - if len(msg['payload']) != rx_len: + if msg["command"] != command: + raise comm_error.VEXCommError("Received command does not match sent command.", msg) + if len(msg["payload"]) != rx_len: raise comm_error.VEXCommError("Received data doesn't match expected length", msg) - return msg['payload'] + return msg["payload"] def _rx_packet(self, timeout: Optional[float] = None) -> Dict[str, Union[Union[int, bytes, bytearray], Any]]: # Optimized to read as quickly as possible w/o delay @@ -72,36 +73,35 @@ def _rx_packet(self, timeout: Optional[float] = None) -> Dict[str, Union[Union[i response_header_stack = bytearray(response_header) rx = bytearray() if not rx == bytearray(response_header): - raise IOError(f"Couldn't find the response header in the device response after {timeout} s. " - f"Got {rx.hex()} but was expecting {response_header.hex()}") + raise IOError( + f"Couldn't find the response header in the device response after {timeout} s. 
" + f"Got {rx.hex()} but was expecting {response_header.hex()}" + ) rx.extend(self.port.read(1)) command = rx[-1] rx.extend(self.port.read(1)) payload_length = rx[-1] if command == 0x56 and (payload_length & 0x80) == 0x80: - logger(__name__).debug('Found an extended message payload') + logger(__name__).debug("Found an extended message payload") rx.extend(self.port.read(1)) - payload_length = ((payload_length & 0x7f) << 8) + rx[-1] + payload_length = ((payload_length & 0x7F) << 8) + rx[-1] payload = self.port.read(payload_length) rx.extend(payload) - return { - 'command': command, - 'payload': payload, - 'raw': rx - } + return {"command": command, "payload": payload, "raw": rx} def _tx_packet(self, command: int, tx_data: Union[Iterable, bytes, bytearray, None] = None): tx = self._form_simple_packet(command) if tx_data is not None: tx = bytes([*tx, *tx_data]) - logger(__name__).debug(f'{self.__class__.__name__} TX: {bytes_to_str(tx)}') + logger(__name__).debug(f"{self.__class__.__name__} TX: {bytes_to_str(tx)}") self.port.read_all() self.port.write(tx) self.port.flush() return tx - def _txrx_packet(self, command: int, tx_data: Union[Iterable, bytes, bytearray, None] = None, - timeout: Optional[float] = None) -> Message: + def _txrx_packet( + self, command: int, tx_data: Union[Iterable, bytes, bytearray, None] = None, timeout: Optional[float] = None + ) -> Message: """ Goes through a send/receive cycle with a VEX device. Transmits the command with the optional additional payload, then reads and parses the outer layer @@ -113,12 +113,12 @@ def _txrx_packet(self, command: int, tx_data: Union[Iterable, bytes, bytearray, """ tx = self._tx_packet(command, tx_data) rx = self._rx_packet(timeout=timeout) - msg = Message(rx['raw'], tx) + msg = Message(rx["raw"], tx) logger(__name__).debug(msg) - msg['payload'] = Message(rx['raw'], tx, internal_rx=rx['payload']) - msg['command'] = rx['command'] + msg["payload"] = Message(rx["raw"], tx, internal_rx=rx["payload"]) + msg["command"] = rx["command"] return msg @staticmethod def _form_simple_packet(msg: int) -> bytearray: - return bytearray([0xc9, 0x36, 0xb8, 0x47, msg]) + return bytearray([0xC9, 0x36, 0xB8, 0x47, msg]) diff --git a/pros/serial/interactive/UploadProjectModal.py b/pros/serial/interactive/UploadProjectModal.py index f14dde7e..7b0b7702 100644 --- a/pros/serial/interactive/UploadProjectModal.py +++ b/pros/serial/interactive/UploadProjectModal.py @@ -14,14 +14,14 @@ class UploadProjectModal(application.Modal[None]): def __init__(self, project: Optional[Project]): - super(UploadProjectModal, self).__init__('Upload Project', confirm_button='Upload') + super(UploadProjectModal, self).__init__("Upload Project", confirm_button="Upload") self.project: Optional[Project] = project self.project_path = ExistingProjectParameter( - str(project.location) if project else os.path.join(os.path.expanduser('~'), 'My PROS Project') + str(project.location) if project else os.path.join(os.path.expanduser("~"), "My PROS Project") ) - self.port = parameters.OptionParameter('', ['']) + self.port = parameters.OptionParameter("", [""]) self.save_settings = parameters.BooleanParameter(True) self.advanced_options: Dict[str, parameters.Parameter] = {} self.advanced_options_collapsed = parameters.BooleanParameter(True) @@ -40,53 +40,51 @@ def cleanup_poll_comports_thread(): cb(self.project_path) def update_slots(self): - assert self.project.target == 'v5' + assert self.project.target == "v5" if self.port.is_valid() and bool(self.port.value): from pros.serial.devices.vex 
import V5Device from pros.serial.ports import DirectPort + device = V5Device(DirectPort(self.port.value)) slot_options = [ - f'{slot}' + ('' if program is None else f' (Currently: {program})') - for slot, program in - device.used_slots().items() + f"{slot}" + ("" if program is None else f" (Currently: {program})") + for slot, program in device.used_slots().items() ] else: slot_options = [str(i) for i in range(1, 9)] - project_name = self.advanced_options['name'].value - if 'slot' in self.project.upload_options: + project_name = self.advanced_options["name"].value + if "slot" in self.project.upload_options: # first, see if the project has it specified in its upload options - selected = slot_options[self.project.upload_options['slot'] - 1] + selected = slot_options[self.project.upload_options["slot"] - 1] else: # otherwise, try to do a name match - matched_slots = [i for i, slot in enumerate(slot_options) if slot.endswith(f'{project_name})')] + matched_slots = [i for i, slot in enumerate(slot_options) if slot.endswith(f"{project_name})")] if len(matched_slots) > 0: selected = slot_options[matched_slots[0]] - elif 'slot' in self.advanced_options: + elif "slot" in self.advanced_options: # or whatever the last value was - selected = slot_options[int(self.advanced_options['slot'].value[0]) - 1] + selected = slot_options[int(self.advanced_options["slot"].value[0]) - 1] else: # or just slot 1 selected = slot_options[0] - self.advanced_options['slot'] = parameters.OptionParameter( - selected, slot_options - ) + self.advanced_options["slot"] = parameters.OptionParameter(selected, slot_options) def update_comports(self): list_all_comports.cache_clear() if isinstance(self.project, Project): options = {} - if self.project.target == 'v5': - options = {p.device for p in find_v5_ports('system')} - elif self.project.target == 'cortex': + if self.project.target == "v5": + options = {p.device for p in find_v5_ports("system")} + elif self.project.target == "cortex": options = [p.device for p in find_cortex_ports()] if options != {*self.port.options}: self.port.options = list(options) if self.port.value not in options: - self.port.update(self.port.options[0] if len(self.port.options) > 0 else 'No ports found') - ui.logger(__name__).debug('Updating ports') + self.port.update(self.port.options[0] if len(self.port.options) > 0 else "No ports found") + ui.logger(__name__).debug("Updating ports") - if self.project and self.project.target == 'v5': + if self.project and self.project.target == "v5": self.update_slots() self.redraw() @@ -102,15 +100,13 @@ def project_changed(self, new_project: ExistingProjectParameter): assert self.project is not None - if self.project.target == 'v5': + if self.project.target == "v5": self.advanced_options = { - 'name': parameters.Parameter(self.project.upload_options.get('remote_name', self.project.name)), - 'description': parameters.Parameter( - self.project.upload_options.get('description', 'Created with PROS') + "name": parameters.Parameter(self.project.upload_options.get("remote_name", self.project.name)), + "description": parameters.Parameter( + self.project.upload_options.get("description", "Created with PROS") ), - 'compress_bin': parameters.BooleanParameter( - self.project.upload_options.get('compress_bin', True) - ) + "compress_bin": parameters.BooleanParameter(self.project.upload_options.get("compress_bin", True)), } self.update_slots() else: @@ -123,16 +119,18 @@ def project_changed(self, new_project: ExistingProjectParameter): ui.logger(__name__).exception(e) def 
confirm(self, *args, **kwargs): - from pros.cli.upload import upload from click import get_current_context - kwargs = {'path': None, 'project': self.project, 'port': self.port.value} + + from pros.cli.upload import upload + + kwargs = {"path": None, "project": self.project, "port": self.port.value} savable_kwargs = {} - if self.project.target == 'v5': - savable_kwargs['remote_name'] = self.advanced_options['name'].value + if self.project.target == "v5": + savable_kwargs["remote_name"] = self.advanced_options["name"].value # XXX: the first character is the slot number - savable_kwargs['slot'] = int(self.advanced_options['slot'].value[0]) - savable_kwargs['description'] = self.advanced_options['description'].value - savable_kwargs['compress_bin'] = self.advanced_options['compress_bin'].value + savable_kwargs["slot"] = int(self.advanced_options["slot"].value[0]) + savable_kwargs["description"] = self.advanced_options["description"].value + savable_kwargs["compress_bin"] = self.advanced_options["compress_bin"].value if self.save_settings.value: self.project.upload_options.update(savable_kwargs) @@ -145,9 +143,7 @@ def confirm(self, *args, **kwargs): @property def can_confirm(self): advanced_valid = all( - p.is_valid() - for p in self.advanced_options.values() - if isinstance(p, parameters.ValidatableParameter) + p.is_valid() for p in self.advanced_options.values() if isinstance(p, parameters.ValidatableParameter) ) return self.project is not None and self.port.is_valid() and advanced_valid @@ -156,15 +152,16 @@ def build(self) -> Generator[components.Component, None, None]: self.poll_comports_thread = Thread(target=with_click_context(self.poll_comports)) self.poll_comports_thread.start() - yield components.DirectorySelector('Project Directory', self.project_path) - yield components.DropDownBox('Port', self.port) - yield components.Checkbox('Save upload settings', self.save_settings) + yield components.DirectorySelector("Project Directory", self.project_path) + yield components.DropDownBox("Port", self.port) + yield components.Checkbox("Save upload settings", self.save_settings) - if isinstance(self.project, Project) and self.project.target == 'v5': + if isinstance(self.project, Project) and self.project.target == "v5": yield components.Container( - components.InputBox('Program Name', self.advanced_options['name']), - components.DropDownBox('Slot', self.advanced_options['slot']), - components.InputBox('Description', self.advanced_options['description']), - components.Checkbox('Compress Binary', self.advanced_options['compress_bin']), - title='Advanced V5 Options', - collapsed=self.advanced_options_collapsed) + components.InputBox("Program Name", self.advanced_options["name"]), + components.DropDownBox("Slot", self.advanced_options["slot"]), + components.InputBox("Description", self.advanced_options["description"]), + components.Checkbox("Compress Binary", self.advanced_options["compress_bin"]), + title="Advanced V5 Options", + collapsed=self.advanced_options_collapsed, + ) diff --git a/pros/serial/interactive/__init__.py b/pros/serial/interactive/__init__.py index aa7f4062..ec961c20 100644 --- a/pros/serial/interactive/__init__.py +++ b/pros/serial/interactive/__init__.py @@ -1,3 +1,3 @@ from .UploadProjectModal import UploadProjectModal -__all__ = ['UploadProjectModal'] +__all__ = ["UploadProjectModal"] diff --git a/pros/serial/ports/__init__.py b/pros/serial/ports/__init__.py index be344a79..a880d536 100644 --- a/pros/serial/ports/__init__.py +++ b/pros/serial/ports/__init__.py @@ -1,15 +1,17 
@@ from functools import lru_cache +from serial.tools import list_ports + from pros.common import logger -from serial.tools import list_ports as list_ports from .base_port import BasePort, PortConnectionException, PortException from .direct_port import DirectPort + # from .v5_wireless_port import V5WirelessPort @lru_cache() def list_all_comports(): ports = list_ports.comports() - logger(__name__).debug('Connected: {}'.format(';'.join([str(p.__dict__) for p in ports]))) + logger(__name__).debug("Connected: {}".format(";".join([str(p.__dict__) for p in ports]))) return ports diff --git a/pros/serial/ports/direct_port.py b/pros/serial/ports/direct_port.py index fa225f54..d18d36e2 100644 --- a/pros/serial/ports/direct_port.py +++ b/pros/serial/ports/direct_port.py @@ -3,34 +3,37 @@ import serial -from pros.common import logger, dont_send +from pros.common import dont_send, logger from pros.serial.ports.exceptions import ConnectionRefusedException, PortNotFoundException + from .base_port import BasePort, PortConnectionException def create_serial_port(port_name: str, timeout: Optional[float] = 1.0) -> serial.Serial: try: - logger(__name__).debug(f'Opening serial port {port_name}') - port = serial.Serial(port_name, baudrate=115200, bytesize=serial.EIGHTBITS, - parity=serial.PARITY_NONE, stopbits=serial.STOPBITS_ONE) + logger(__name__).debug(f"Opening serial port {port_name}") + port = serial.Serial( + port_name, + baudrate=115200, + bytesize=serial.EIGHTBITS, + parity=serial.PARITY_NONE, + stopbits=serial.STOPBITS_ONE, + ) port.timeout = timeout port.inter_byte_timeout = 0.2 return port except serial.SerialException as e: - if any(msg in str(e) for msg in [ - 'Access is denied', 'Errno 16', 'Errno 13' - ]): + if any(msg in str(e) for msg in ["Access is denied", "Errno 16", "Errno 13"]): tb = sys.exc_info()[2] raise dont_send(ConnectionRefusedException(port_name, e).with_traceback(tb)) else: raise dont_send(PortNotFoundException(port_name, e)) - class DirectPort(BasePort): def __init__(self, port_name: str, **kwargs): - self.serial: serial.Serial = create_serial_port(port_name=port_name, timeout=kwargs.pop('timeout', 1.0)) + self.serial: serial.Serial = create_serial_port(port_name=port_name, timeout=kwargs.pop("timeout", 1.0)) self.buffer: bytearray = bytearray() def read(self, n_bytes: int = 0) -> bytes: @@ -55,14 +58,14 @@ def read(self, n_bytes: int = 0) -> bytes: def write(self, data: Union[str, bytes]): if isinstance(data, str): - data = data.encode(encoding='ascii') + data = data.encode(encoding="ascii") self.serial.write(data) def flush(self): self.serial.flush() def destroy(self): - logger(__name__).debug(f'Destroying {self.__class__.__name__} to {self.serial.name}') + logger(__name__).debug(f"Destroying {self.__class__.__name__} to {self.serial.name}") self.serial.close() @property diff --git a/pros/serial/ports/exceptions.py b/pros/serial/ports/exceptions.py index cd3f0bca..44e63f30 100644 --- a/pros/serial/ports/exceptions.py +++ b/pros/serial/ports/exceptions.py @@ -1,18 +1,23 @@ import os + import serial + class ConnectionRefusedException(IOError): def __init__(self, port_name: str, reason: Exception): self.__cause__ = reason self.port_name = port_name def __str__(self): - extra = '' - if os.name == 'posix': - extra = 'adding yourself to dialout group ' - return f"could not open port '{self.port_name}'. 
Try closing any other VEX IDEs such as VEXCode, Robot Mesh Studio, or " \ - f"firmware utilities; moving to a different USB port; {extra}or " \ + extra = "" + if os.name == "posix": + extra = "adding yourself to dialout group " + return ( + f"could not open port '{self.port_name}'. Try closing any other VEX IDEs such as VEXCode, Robot Mesh Studio, or " + f"firmware utilities; moving to a different USB port; {extra}or " f"restarting the device." + ) + class PortNotFoundException(serial.SerialException): def __init__(self, port_name: str, reason: Exception): @@ -20,11 +25,11 @@ def __init__(self, port_name: str, reason: Exception): self.port_name = port_name def __str__(self): - extra = '' - if os.name == 'posix': - extra = 'adding yourself to dialout group ' - return f"Port not found: Could not open port '{self.port_name}'. Try closing any other VEX IDEs such as VEXCode, Robot Mesh Studio, or " \ - f"firmware utilities; moving to a different USB port; {extra}or " \ + extra = "" + if os.name == "posix": + extra = "adding yourself to dialout group " + return ( + f"Port not found: Could not open port '{self.port_name}'. Try closing any other VEX IDEs such as VEXCode, Robot Mesh Studio, or " + f"firmware utilities; moving to a different USB port; {extra}or " f"restarting the device." - - + ) diff --git a/pros/serial/ports/serial_share_bridge.py b/pros/serial/ports/serial_share_bridge.py index b632a5dc..b06827c6 100644 --- a/pros/serial/ports/serial_share_bridge.py +++ b/pros/serial/ports/serial_share_bridge.py @@ -5,27 +5,33 @@ import zmq from cobs import cobs + from pros.common.utils import * -from .direct_port import DirectPort from .. import bytes_to_str +from .direct_port import DirectPort def get_port_num(serial_port_name: str, hash: str) -> int: - return sum("Powered by PROS: {}-{}".format(serial_port_name, hash).encode(encoding='ascii')) + return sum("Powered by PROS: {}-{}".format(serial_port_name, hash).encode(encoding="ascii")) def get_from_device_port_num(serial_port_name: str) -> int: - return get_port_num(serial_port_name, 'from') + return get_port_num(serial_port_name, "from") def get_to_device_port_num(serial_port_name: str) -> int: - return get_port_num(serial_port_name, 'to') + return get_port_num(serial_port_name, "to") class SerialShareBridge(object): - def __init__(self, serial_port_name: str, base_addr: str = '127.0.0.1', - to_device_port_num: int = None, from_device_port_num: int = None): + def __init__( + self, + serial_port_name: str, + base_addr: str = "127.0.0.1", + to_device_port_num: int = None, + from_device_port_num: int = None, + ): self._serial_port_name = serial_port_name self._base_addr = base_addr if to_device_port_num is None: @@ -50,16 +56,16 @@ def from_device_port_num(self): def start(self): # this function is still in the parent process - mp_ctx = multiprocessing.get_context('spawn') + mp_ctx = multiprocessing.get_context("spawn") barrier = multiprocessing.Barrier(3) - task = mp_ctx.Process(target=self._start, name='Serial Share Bridge', args=(barrier,)) + task = mp_ctx.Process(target=self._start, name="Serial Share Bridge", args=(barrier,)) task.daemon = False task.start() barrier.wait(1) return task def kill(self, do_join: bool = False): - logger(__name__).info('Killing serial share server due to watchdog') + logger(__name__).info("Killing serial share server due to watchdog") self.dying.set() self.port.destroy() if not self.zmq_ctx.closed: @@ -72,25 +78,28 @@ def kill(self, do_join: bool = False): def _start(self, initialization_barrier: 
multiprocessing.Barrier): try: - log_dir = os.path.join(get_pros_dir(), 'logs') + log_dir = os.path.join(get_pros_dir(), "logs") os.makedirs(log_dir, exist_ok=True) pros_logger = logging.getLogger(pros.__name__) pros_logger.setLevel(logging.DEBUG) - log_file_name = os.path.join(get_pros_dir(), 'logs', 'serial-share-bridge.log') + log_file_name = os.path.join(get_pros_dir(), "logs", "serial-share-bridge.log") handler = logging.handlers.TimedRotatingFileHandler(log_file_name, backupCount=1) handler.setLevel(logging.DEBUG) - fmt_str = '%(name)s.%(funcName)s:%(levelname)s - %(asctime)s - %(message)s (%(process)d) ({})' \ - .format(self._serial_port_name) + fmt_str = "%(name)s.%(funcName)s:%(levelname)s - %(asctime)s - %(message)s (%(process)d) ({})".format( + self._serial_port_name + ) handler.setFormatter(logging.Formatter(fmt_str)) pros_logger.addHandler(handler) self.zmq_ctx = zmq.Context() # timeout is none, so blocks indefinitely. Helps reduce CPU usage when there's nothing being recv self.port = DirectPort(self._serial_port_name, timeout=None) - self.from_device_thread = threading.Thread(target=self._from_device_loop, name='From Device Reader', - daemon=False, args=(initialization_barrier,)) - self.to_device_thread = threading.Thread(target=self._to_device_loop, name='To Device Reader', - daemon=False, args=(initialization_barrier,)) + self.from_device_thread = threading.Thread( + target=self._from_device_loop, name="From Device Reader", daemon=False, args=(initialization_barrier,) + ) + self.to_device_thread = threading.Thread( + target=self._to_device_loop, name="To Device Reader", daemon=False, args=(initialization_barrier,) + ) self.dying = threading.Event() # type: threading.Event self.from_device_thread.start() self.to_device_thread.start() @@ -98,8 +107,11 @@ def _start(self, initialization_barrier: multiprocessing.Barrier): while not self.dying.wait(10000): pass - logger(__name__).info('Main serial share bridge thread is dying. Everything else should be dead: {}'.format( - threading.active_count() - 1)) + logger(__name__).info( + "Main serial share bridge thread is dying. 
Everything else should be dead: {}".format( + threading.active_count() - 1 + ) + ) self.kill(do_join=True) except Exception as e: initialization_barrier.abort() @@ -110,9 +122,9 @@ def _from_device_loop(self, initialization_barrier: multiprocessing.Barrier): rxd = 0 try: from_ser_sock = self.zmq_ctx.socket(zmq.PUB) - addr = 'tcp://{}:{}'.format(self._base_addr, self._from_port_num) + addr = "tcp://{}:{}".format(self._base_addr, self._from_port_num) from_ser_sock.bind(addr) - logger(__name__).info('Bound from device broadcaster as a publisher to {}'.format(addr)) + logger(__name__).info("Bound from device broadcaster as a publisher to {}".format(addr)) initialization_barrier.wait() buffer = bytearray() while not self.dying.is_set(): @@ -121,25 +133,31 @@ def _from_device_loop(self, initialization_barrier: multiprocessing.Barrier): # then read everything available buffer.extend(self.port.read(1)) buffer.extend(self.port.read(-1)) - while b'\0' in buffer and not self.dying.is_set(): - msg, buffer = buffer.split(b'\0', 1) + while b"\0" in buffer and not self.dying.is_set(): + msg, buffer = buffer.split(b"\0", 1) msg = cobs.decode(msg) from_ser_sock.send_multipart((msg[:4], msg[4:])) rxd += 1 time.sleep(0) except Exception as e: # TODO: when getting a COBS decode error, rebroadcast the bytes on sout - logger(__name__).error('Unexpected error handling {}'.format(bytes_to_str(msg[:-1]))) + logger(__name__).error("Unexpected error handling {}".format(bytes_to_str(msg[:-1]))) logger(__name__).exception(e) errors += 1 - logger(__name__).info('Current from device broadcasting error rate: {} errors. {} successful. {}%' - .format(errors, rxd, errors / (errors + rxd))) + logger(__name__).info( + "Current from device broadcasting error rate: {} errors. {} successful. {}%".format( + errors, rxd, errors / (errors + rxd) + ) + ) except Exception as e: initialization_barrier.abort() logger(__name__).exception(e) - logger(__name__).warning('From Device Broadcaster is dying now.') - logger(__name__).info('Current from device broadcasting error rate: {} errors. {} successful. {}%' - .format(errors, rxd, errors / (errors + rxd))) + logger(__name__).warning("From Device Broadcaster is dying now.") + logger(__name__).info( + "Current from device broadcasting error rate: {} errors. {} successful. 
{}%".format( + errors, rxd, errors / (errors + rxd) + ) + ) try: self.kill(do_join=False) except: @@ -148,10 +166,10 @@ def _from_device_loop(self, initialization_barrier: multiprocessing.Barrier): def _to_device_loop(self, initialization_barrier: multiprocessing.Barrier): try: to_ser_sock = self.zmq_ctx.socket(zmq.SUB) - addr = 'tcp://{}:{}'.format(self._base_addr, self._to_port_num) + addr = "tcp://{}:{}".format(self._base_addr, self._to_port_num) to_ser_sock.bind(addr) - to_ser_sock.setsockopt(zmq.SUBSCRIBE, b'') - logger(__name__).info('Bound to device broadcaster as a subscriber to {}'.format(addr)) + to_ser_sock.setsockopt(zmq.SUBSCRIBE, b"") + logger(__name__).info("Bound to device broadcaster as a subscriber to {}".format(addr)) watchdog = threading.Timer(10, self.kill) initialization_barrier.wait() watchdog.start() @@ -159,18 +177,18 @@ def _to_device_loop(self, initialization_barrier: multiprocessing.Barrier): msg = to_ser_sock.recv_multipart() if not msg or self.dying.is_set(): continue - if msg[0] == b'kick': - logger(__name__).debug('Kicking watchdog on server {}'.format(threading.current_thread())) + if msg[0] == b"kick": + logger(__name__).debug("Kicking watchdog on server {}".format(threading.current_thread())) watchdog.cancel() watchdog = threading.Timer(msg[1][1] if len(msg) > 1 and len(msg[1]) > 0 else 5, self.kill) watchdog.start() - elif msg[0] == b'send': - logger(self).debug('Writing {} to {}'.format(bytes_to_str(msg[1]), self.port.port_name)) + elif msg[0] == b"send": + logger(self).debug("Writing {} to {}".format(bytes_to_str(msg[1]), self.port.port_name)) self.port.write(msg[1]) except Exception as e: initialization_barrier.abort() logger(__name__).exception(e) - logger(__name__).warning('To Device Broadcaster is dying now.') + logger(__name__).warning("To Device Broadcaster is dying now.") try: self.kill(do_join=False) except: diff --git a/pros/serial/ports/serial_share_port.py b/pros/serial/ports/serial_share_port.py index f329ac7e..1a9df09a 100644 --- a/pros/serial/ports/serial_share_port.py +++ b/pros/serial/ports/serial_share_port.py @@ -3,8 +3,14 @@ class SerialSharePort(BasePort): - def __init__(self, port_name: str, topic: bytes = b'sout', addr: str = '127.0.0.1', - to_device_port: int = None, from_device_port: int = None): + def __init__( + self, + port_name: str, + topic: bytes = b"sout", + addr: str = "127.0.0.1", + to_device_port: int = None, + from_device_port: int = None, + ): self.port_name = port_name self.topic = topic self._base_addr = addr @@ -23,18 +29,20 @@ def __init__(self, port_name: str, topic: bytes = b'sout', addr: str = '127.0.0. 
self.from_device_sock = self.ctx.socket(zmq.SUB) # type: zmq.Socket self.from_device_sock.setsockopt(zmq.SUBSCRIBE, self.topic) - self.from_device_sock.setsockopt(zmq.SUBSCRIBE, b'kdbg') - self.from_device_sock.connect('tcp://{}:{}'.format(self._base_addr, self._from_port_num)) + self.from_device_sock.setsockopt(zmq.SUBSCRIBE, b"kdbg") + self.from_device_sock.connect("tcp://{}:{}".format(self._base_addr, self._from_port_num)) logger(__name__).info( - 'Connected from device as a subscriber on tcp://{}:{}'.format(self._base_addr, self._from_port_num)) + "Connected from device as a subscriber on tcp://{}:{}".format(self._base_addr, self._from_port_num) + ) self.to_device_sock = self.ctx.socket(zmq.PUB) # type: zmq.Socket - self.to_device_sock.connect('tcp://{}:{}'.format(self._base_addr, self._to_port_num)) + self.to_device_sock.connect("tcp://{}:{}".format(self._base_addr, self._to_port_num)) logger(__name__).info( - 'Connected to device as a publisher on tcp://{}:{}'.format(self._base_addr, self._to_port_num)) + "Connected to device as a publisher on tcp://{}:{}".format(self._base_addr, self._to_port_num) + ) self.alive = threading.Event() - self.watchdog_thread = threading.Thread(target=self._kick_watchdog, name='Client Kicker') + self.watchdog_thread = threading.Thread(target=self._kick_watchdog, name="Client Kicker") self.watchdog_thread.start() def read(self, n_bytes: int = -1): @@ -50,22 +58,22 @@ def read_packet(self): def write(self, data: AnyStr): if isinstance(data, str): - data = data.encode(encoding='ascii') + data = data.encode(encoding="ascii") assert isinstance(data, bytes) - self.to_device_sock.send_multipart([b'send', data]) + self.to_device_sock.send_multipart([b"send", data]) def subscribe(self, topic: bytes): assert len(topic) == 4 - self.write(bytearray([*b'pRe', *topic])) + self.write(bytearray([*b"pRe", *topic])) self.from_device_sock.subscribe(topic=topic) def unsubscribe(self, topic: bytes): assert len(topic) == 4 - self.write(bytearray([*b'pRd', *topic])) + self.write(bytearray([*b"pRd", *topic])) self.from_device_sock.unsubscribe(topic=topic) def destroy(self): - logger(__name__).info('Destroying {}'.format(self)) + logger(__name__).info("Destroying {}".format(self)) self.alive.set() if self.watchdog_thread.is_alive(): self.watchdog_thread.join() @@ -77,7 +85,7 @@ def destroy(self): def _kick_watchdog(self): time.sleep(0.5) while not self.alive.is_set(): - logger(__name__).debug('Kicking server from {}'.format(threading.current_thread())) - self.to_device_sock.send_multipart([b'kick']) + logger(__name__).debug("Kicking server from {}".format(threading.current_thread())) + self.to_device_sock.send_multipart([b"kick"]) self.alive.wait(2.5) - logger(__name__).info('Watchdog kicker is dying') + logger(__name__).info("Watchdog kicker is dying") diff --git a/pros/serial/ports/v5_wireless_port.py b/pros/serial/ports/v5_wireless_port.py index 80d4717d..dc25c259 100644 --- a/pros/serial/ports/v5_wireless_port.py +++ b/pros/serial/ports/v5_wireless_port.py @@ -1,36 +1,36 @@ -from typing import * - -from pros.serial.devices.vex.v5_device import V5Device -from pros.serial.ports import BasePort, DirectPort - - -class V5WirelessPort(BasePort): - def __init__(self, port): - self.buffer: bytearray = bytearray() - - self.port_instance = DirectPort(port) - self.device = V5Device(self.port_instance) - self.download_channel = self.device.DownloadChannel(self.device) - self.download_channel.__enter__() - - def destroy(self): - self.port_instance.destroy() - 
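# --- Illustrative sketch, not part of the diff above. It summarises the wire
# protocol implied by serial_share_server.py / serial_share_port.py: bytes read
# from the device are COBS-encoded frames terminated by b"\0", the first four
# decoded bytes are a topic (e.g. b"sout") that ZMQ subscribers filter on, and
# clients talk back with [b"send", payload] / [b"kick"] multipart frames.
# The port numbers and the sample payload below are made-up example values.
import zmq
from cobs import cobs


def split_frames(buffer: bytes):
    """Yield (topic, payload) pairs from a run of NUL-terminated COBS frames."""
    while b"\0" in buffer:
        frame, buffer = buffer.split(b"\0", 1)
        decoded = cobs.decode(frame)
        yield decoded[:4], decoded[4:]  # 4-byte topic, then the payload


# Framing round trip: encode one frame the way the device side would.
wire = cobs.encode(b"sout" + b"hello\n") + b"\0"
for topic, payload in split_frames(wire):
    print(topic, payload)  # b'sout' b'hello\n'

# Client side of the share protocol, mirroring SerialSharePort above.
ctx = zmq.Context.instance()
sub = ctx.socket(zmq.SUB)                # receives what the share server re-publishes
sub.setsockopt(zmq.SUBSCRIBE, b"sout")
sub.connect("tcp://127.0.0.1:5556")      # hypothetical from-device port number
pub = ctx.socket(zmq.PUB)                # talks back to the share server
pub.connect("tcp://127.0.0.1:5557")      # hypothetical to-device port number
pub.send_multipart([b"kick"])            # resets the server's shutdown watchdog
pub.send_multipart([b"send", b"hello"])  # forwarded verbatim to the serial device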
self.download_channel.__exit__() - - def config(self, command: str, argument: Any): - return self.port_instance.config(command, argument) - - # TODO: buffer input? technically this is done by the user_fifo_write cmd blocking until whole input is written? - def write(self, data: bytes): - self.device.user_fifo_write(data) - - def read(self, n_bytes: int = 0) -> bytes: - if n_bytes > len(self.buffer): - self.buffer.extend(self.device.user_fifo_read()) - ret = self.buffer[:n_bytes] - self.buffer = self.buffer[n_bytes:] - return ret - - @property - def name(self) -> str: - return self.port_instance.name +from typing import * + +from pros.serial.devices.vex.v5_device import V5Device +from pros.serial.ports import BasePort, DirectPort + + +class V5WirelessPort(BasePort): + def __init__(self, port): + self.buffer: bytearray = bytearray() + + self.port_instance = DirectPort(port) + self.device = V5Device(self.port_instance) + self.download_channel = self.device.DownloadChannel(self.device) + self.download_channel.__enter__() + + def destroy(self): + self.port_instance.destroy() + self.download_channel.__exit__() + + def config(self, command: str, argument: Any): + return self.port_instance.config(command, argument) + + # TODO: buffer input? technically this is done by the user_fifo_write cmd blocking until whole input is written? + def write(self, data: bytes): + self.device.user_fifo_write(data) + + def read(self, n_bytes: int = 0) -> bytes: + if n_bytes > len(self.buffer): + self.buffer.extend(self.device.user_fifo_read()) + ret = self.buffer[:n_bytes] + self.buffer = self.buffer[n_bytes:] + return ret + + @property + def name(self) -> str: + return self.port_instance.name diff --git a/pros/serial/terminal/terminal.py b/pros/serial/terminal/terminal.py index a0c78264..4bd805f7 100644 --- a/pros/serial/terminal/terminal.py +++ b/pros/serial/terminal/terminal.py @@ -11,7 +11,6 @@ from pros.serial.devices import StreamDevice from pros.serial.ports import PortConnectionException - # This file is a modification of the miniterm implementation on pyserial @@ -60,10 +59,9 @@ def __exit__(self, *args, **kwargs): self.setup() -if os.name == 'nt': # noqa - import msvcrt +if os.name == "nt": # noqa import ctypes - + import msvcrt class Out(object): """file-like wrapper that uses os.write""" @@ -77,7 +75,6 @@ def flush(self): def write(self, s): os.write(self.fd, s) - class Console(ConsoleBase): def __init__(self): super(Console, self).__init__() @@ -104,7 +101,7 @@ def getkey(self): z = msvcrt.getwch() if z == chr(13): return chr(10) - elif z in (chr(0), chr(0x0e)): # functions keys, ignore + elif z in (chr(0), chr(0x0E)): # functions keys, ignore msvcrt.getwch() else: return z @@ -113,13 +110,12 @@ def cancel(self): # CancelIo, CancelSynchronousIo do not seem to work when using # getwch, so instead, send a key to the window with the console hwnd = ctypes.windll.kernel32.GetConsoleWindow() - ctypes.windll.user32.PostMessageA(hwnd, 0x100, 0x0d, 0) + ctypes.windll.user32.PostMessageA(hwnd, 0x100, 0x0D, 0) -elif os.name == 'posix': +elif os.name == "posix": import atexit - import termios import select - + import termios class Console(ConsoleBase): def __init__(self): @@ -131,8 +127,7 @@ def __init__(self): self.old = termios.tcgetattr(self.fd) atexit.register(self.cleanup) if sys.version_info < (3, 0): - self.enc_stdin = codecs. 
\ - getreader(sys.stdin.encoding)(sys.stdin) + self.enc_stdin = codecs.getreader(sys.stdin.encoding)(sys.stdin) else: self.enc_stdin = sys.stdin @@ -144,13 +139,12 @@ def setup(self): termios.tcsetattr(self.fd, termios.TCSANOW, new) def getkey(self): - ready, _, _ = select.select([self.enc_stdin, self.pipe_r], [], - [], None) + ready, _, _ = select.select([self.enc_stdin, self.pipe_r], [], [], None) if self.pipe_r in ready: os.read(self.pipe_r, 1) return c = self.enc_stdin.read(1) - if c == chr(0x7f): + if c == chr(0x7F): c = chr(8) # map the BS key (which yields DEL) to backspace return c @@ -161,19 +155,18 @@ def cleanup(self): termios.tcsetattr(self.fd, termios.TCSAFLUSH, self.old) else: - raise NotImplementedError( - 'Sorry no implementation for your platform ({})' - ' available.'.format(sys.platform)) + raise NotImplementedError("Sorry no implementation for your platform ({})" " available.".format(sys.platform)) class Terminal(object): """This class is loosely based off of the pyserial miniterm""" - def __init__(self, port_instance: StreamDevice, transformations=(), - output_raw: bool = False, request_banner: bool = True): + def __init__( + self, port_instance: StreamDevice, transformations=(), output_raw: bool = False, request_banner: bool = True + ): self.device = port_instance - self.device.subscribe(b'sout') - self.device.subscribe(b'serr') + self.device.subscribe(b"sout") + self.device.subscribe(b"serr") self.transformations = transformations self._reader_alive = None self.receiver_thread = None # type: threading.Thread @@ -189,8 +182,7 @@ def __init__(self, port_instance: StreamDevice, transformations=(), def _start_rx(self): self._reader_alive = True - self.receiver_thread = threading.Thread(target=self.reader, - name='serial-rx-term') + self.receiver_thread = threading.Thread(target=self.reader, name="serial-rx-term") self.receiver_thread.daemon = True self.receiver_thread.start() @@ -200,8 +192,7 @@ def _stop_rx(self): def _start_tx(self): self._transmitter_alive = True - self.transmitter_thread = threading.Thread(target=self.transmitter, - name='serial-tx-term') + self.transmitter_thread = threading.Thread(target=self.transmitter, name="serial-tx-term") self.transmitter_thread.daemon = True self.transmitter_thread.start() @@ -213,7 +204,7 @@ def _stop_tx(self): def reader(self): if self.request_banner: try: - self.device.write(b'pRb') + self.device.write(b"pRb") except Exception as e: logger(__name__).exception(e) try: @@ -221,23 +212,25 @@ def reader(self): data = self.device.read() if not data: continue - if data[0] == b'sout': + if data[0] == b"sout": text = decode_bytes_to_str(data[1]) - elif data[0] == b'serr': - text = '{}{}{}'.format(colorama.Fore.RED, decode_bytes_to_str(data[1]), colorama.Style.RESET_ALL) - elif data[0] == b'kdbg': - text = '{}\n\nKERNEL DEBUG:\t{}{}\n'.format(colorama.Back.GREEN + colorama.Style.BRIGHT, - decode_bytes_to_str(data[1]), - colorama.Style.RESET_ALL) - elif data[0] != b'': - text = '{}{}'.format(decode_bytes_to_str(data[0]), decode_bytes_to_str(data[1])) + elif data[0] == b"serr": + text = "{}{}{}".format(colorama.Fore.RED, decode_bytes_to_str(data[1]), colorama.Style.RESET_ALL) + elif data[0] == b"kdbg": + text = "{}\n\nKERNEL DEBUG:\t{}{}\n".format( + colorama.Back.GREEN + colorama.Style.BRIGHT, + decode_bytes_to_str(data[1]), + colorama.Style.RESET_ALL, + ) + elif data[0] != b"": + text = "{}{}".format(decode_bytes_to_str(data[0]), decode_bytes_to_str(data[1])) else: text = "{}".format(decode_bytes_to_str(data[1])) 
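# --- Illustrative sketch, not part of the diff above. It condenses the
# topic-to-colour mapping used by Terminal.reader() into a standalone helper;
# the topic names (b"sout", b"serr", b"kdbg") come from the code, while the
# helper name and the sample payload are made up for the example.
import colorama

colorama.init()  # enables ANSI colour handling on Windows consoles


def render(topic: bytes, payload: str) -> str:
    if topic == b"serr":   # program stderr stream: shown in red
        return f"{colorama.Fore.RED}{payload}{colorama.Style.RESET_ALL}"
    if topic == b"kdbg":   # kernel debug messages: bright green banner
        return (f"{colorama.Back.GREEN}{colorama.Style.BRIGHT}"
                f"\n\nKERNEL DEBUG:\t{payload}{colorama.Style.RESET_ALL}\n")
    return payload         # b"sout" and any other topic: plain text


print(render(b"serr", "motor over-temperature"))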
self.console.write(text) except UnicodeError as e: logger(__name__).exception(e) except PortConnectionException: - logger(__name__).warning(f'Connection to {self.device.name} broken') + logger(__name__).warning(f"Connection to {self.device.name} broken") if not self.alive.is_set(): self.stop() except Exception as e: @@ -246,7 +239,7 @@ def reader(self): else: logger(__name__).debug(e) self.stop() - logger(__name__).info('Terminal receiver dying') + logger(__name__).info("Terminal receiver dying") def transmitter(self): try: @@ -254,14 +247,14 @@ def transmitter(self): try: c = self.console.getkey() except KeyboardInterrupt: - c = '\x03' + c = "\x03" if self.alive.is_set(): break - if c == '\x03' or not self.no_sigint: + if c == "\x03" or not self.no_sigint: self.stop() break else: - self.device.write(c.encode(encoding='utf-8')) + self.device.write(c.encode(encoding="utf-8")) self.console.write(c) except Exception as e: if not self.alive.is_set(): @@ -269,7 +262,7 @@ def transmitter(self): else: logger(__name__).debug(e) self.stop() - logger(__name__).info('Terminal transmitter dying') + logger(__name__).info("Terminal transmitter dying") def catch_sigint(self): self.no_sigint = False @@ -284,13 +277,13 @@ def start(self): def stop(self, *args): self.console.cleanup() if not self.alive.is_set(): - logger(__name__).warning('Stopping terminal') + logger(__name__).warning("Stopping terminal") self.alive.set() self.device.destroy() if threading.current_thread() != self.transmitter_thread and self.transmitter_thread.is_alive(): self.console.cleanup() self.console.cancel() - logger(__name__).info('All done!') + logger(__name__).info("All done!") def join(self): try: diff --git a/pros/upgrade/__init__.py b/pros/upgrade/__init__.py index 9794ad32..4c546227 100644 --- a/pros/upgrade/__init__.py +++ b/pros/upgrade/__init__.py @@ -5,4 +5,4 @@ def get_platformv2(): return UpgradeManifestV2().platform -__all__ = ['UpgradeManager', 'get_platformv2'] +__all__ = ["UpgradeManager", "get_platformv2"] diff --git a/pros/upgrade/instructions/__init__.py b/pros/upgrade/instructions/__init__.py index 26d62f32..452e2915 100644 --- a/pros/upgrade/instructions/__init__.py +++ b/pros/upgrade/instructions/__init__.py @@ -1,6 +1,6 @@ from .base_instructions import UpgradeInstruction, UpgradeResult -from .nothing_instructions import NothingInstruction from .download_instructions import DownloadInstruction from .explorer_instructions import ExplorerInstruction +from .nothing_instructions import NothingInstruction -__all__ = ['UpgradeInstruction', 'UpgradeResult', 'NothingInstruction', 'ExplorerInstruction', 'DownloadInstruction'] +__all__ = ["UpgradeInstruction", "UpgradeResult", "NothingInstruction", "ExplorerInstruction", "DownloadInstruction"] diff --git a/pros/upgrade/instructions/download_instructions.py b/pros/upgrade/instructions/download_instructions.py index 48f8b49e..7a428c8c 100644 --- a/pros/upgrade/instructions/download_instructions.py +++ b/pros/upgrade/instructions/download_instructions.py @@ -2,6 +2,7 @@ from typing import * from pros.common.utils import download_file + from .base_instructions import UpgradeInstruction, UpgradeResult @@ -9,7 +10,8 @@ class DownloadInstruction(UpgradeInstruction): """ Downloads a file """ - def __init__(self, url='', extension=None, download_description=None, success_explanation=None): + + def __init__(self, url="", extension=None, download_description=None, success_explanation=None): self.url: str = url self.extension: Optional[str] = extension self.download_description: 
Optional[str] = download_description @@ -21,14 +23,15 @@ def perform_upgrade(self) -> UpgradeResult: file = download_file(self.url, ext=self.extension, desc=self.download_description) assert file except (AssertionError, IOError) as e: - return UpgradeResult(False, explanation=f'Failed to download required file. ({e})', exception=e) + return UpgradeResult(False, explanation=f"Failed to download required file. ({e})", exception=e) if self.success_explanation: - explanation = self.success_explanation.replace('//FILE\\\\', file) \ - .replace('//SHORT\\\\', os.path.split(file)[1]) + explanation = self.success_explanation.replace("//FILE\\\\", file).replace( + "//SHORT\\\\", os.path.split(file)[1] + ) else: - explanation = f'Downloaded {os.path.split(file)[1]}' + explanation = f"Downloaded {os.path.split(file)[1]}" return UpgradeResult(True, explanation=explanation, file=file, origin=self.url) def __str__(self) -> str: - return 'Download required file.' + return "Download required file." diff --git a/pros/upgrade/instructions/explorer_instructions.py b/pros/upgrade/instructions/explorer_instructions.py index ae843ba3..c54748b9 100644 --- a/pros/upgrade/instructions/explorer_instructions.py +++ b/pros/upgrade/instructions/explorer_instructions.py @@ -11,8 +11,9 @@ def perform_upgrade(self) -> UpgradeResult: result = super().perform_upgrade() if result.successful: import click - click.launch(getattr(result, 'file')) + + click.launch(getattr(result, "file")) return result def __str__(self) -> str: - return 'Download required file.' + return "Download required file." diff --git a/pros/upgrade/instructions/nothing_instructions.py b/pros/upgrade/instructions/nothing_instructions.py index a3619173..1c11df8c 100644 --- a/pros/upgrade/instructions/nothing_instructions.py +++ b/pros/upgrade/instructions/nothing_instructions.py @@ -3,7 +3,7 @@ class NothingInstruction(UpgradeInstruction): def __str__(self) -> str: - return 'No automated instructions. View release notes for installation instructions.' + return "No automated instructions. View release notes for installation instructions." def perform_upgrade(self) -> UpgradeResult: return UpgradeResult(True) diff --git a/pros/upgrade/manifests/__init__.py b/pros/upgrade/manifests/__init__.py index 290f42c5..4e58eb16 100644 --- a/pros/upgrade/manifests/__init__.py +++ b/pros/upgrade/manifests/__init__.py @@ -1,8 +1,8 @@ from typing import * from .upgrade_manifest_v1 import UpgradeManifestV1 -from .upgrade_manifest_v2 import UpgradeManifestV2, PlatformsV2 +from .upgrade_manifest_v2 import PlatformsV2, UpgradeManifestV2 # Order of files manifests = [UpgradeManifestV2, UpgradeManifestV1] # type: List[Type] -__all__ = ['UpgradeManifestV1', 'UpgradeManifestV2', 'manifests', 'PlatformsV2'] +__all__ = ["UpgradeManifestV1", "UpgradeManifestV2", "manifests", "PlatformsV2"] diff --git a/pros/upgrade/manifests/upgrade_manifest_v1.py b/pros/upgrade/manifests/upgrade_manifest_v1.py index 51ba9346..f0187d4a 100644 --- a/pros/upgrade/manifests/upgrade_manifest_v1.py +++ b/pros/upgrade/manifests/upgrade_manifest_v1.py @@ -1,6 +1,7 @@ from semantic_version import Version from pros.common.utils import get_version, logger + from ..instructions import UpgradeResult @@ -26,10 +27,12 @@ def describe_update(self) -> str: :return: """ if self.needs_upgrade: - return f'There is an update available! {self.version} is the latest version.\n' \ - f'Go to {self.info_url} to learn more.' + return ( + f"There is an update available! 
{self.version} is the latest version.\n" + f"Go to {self.info_url} to learn more." + ) else: - return f'You are up to date. ({self.version})' + return f"You are up to date. ({self.version})" def __str__(self): return self.describe_update() @@ -41,7 +44,8 @@ def can_perform_upgrade(self) -> bool: def perform_upgrade(self) -> UpgradeResult: logger(__name__).debug(self.__dict__) from click import launch + return UpgradeResult(launch(self.info_url) == 0) def describe_post_install(self, **kwargs) -> str: - return f'Download the latest version from {self.info_url}' + return f"Download the latest version from {self.info_url}" diff --git a/pros/upgrade/manifests/upgrade_manifest_v2.py b/pros/upgrade/manifests/upgrade_manifest_v2.py index b024aa3d..00ee656f 100644 --- a/pros/upgrade/manifests/upgrade_manifest_v2.py +++ b/pros/upgrade/manifests/upgrade_manifest_v2.py @@ -3,8 +3,9 @@ from typing import * from pros.common import logger + +from ..instructions import NothingInstruction, UpgradeInstruction, UpgradeResult from .upgrade_manifest_v1 import UpgradeManifestV1 -from ..instructions import UpgradeInstruction, UpgradeResult, NothingInstruction class PlatformsV2(Enum): @@ -26,32 +27,34 @@ def __init__(self): super().__init__() self.platform_instructions: Dict[PlatformsV2, UpgradeInstruction] = {} - self._platform: 'PlatformsV2' = None + self._platform: "PlatformsV2" = None self._last_file: Optional[str] = None @property - def platform(self) -> 'PlatformsV2': + def platform(self) -> "PlatformsV2": """ Attempts to detect the current platform type :return: The detected platform type, or Unknown """ if self._platform is not None: return self._platform - if getattr(sys, 'frozen', False): + if getattr(sys, "frozen", False): import _constants - frozen_platform = getattr(_constants, 'FROZEN_PLATFORM_V1', None) + + frozen_platform = getattr(_constants, "FROZEN_PLATFORM_V1", None) if isinstance(frozen_platform, str): - if frozen_platform.startswith('Windows86'): + if frozen_platform.startswith("Windows86"): self._platform = PlatformsV2.Windows86 - elif frozen_platform.startswith('Windows64'): + elif frozen_platform.startswith("Windows64"): self._platform = PlatformsV2.Windows64 - elif frozen_platform.startswith('MacOS'): + elif frozen_platform.startswith("MacOS"): self._platform = PlatformsV2.MacOS else: try: from pip._vendor import pkg_resources - results = [p for p in pkg_resources.working_set if p.project_name.startswith('pros-cli')] + + results = [p for p in pkg_resources.working_set if p.project_name.startswith("pros-cli")] if any(results): self._platform = PlatformsV2.Pip except ImportError: @@ -67,12 +70,9 @@ def can_perform_upgrade(self) -> bool: def perform_upgrade(self) -> UpgradeResult: instructions: UpgradeInstruction = self.platform_instructions.get(self.platform, NothingInstruction()) logger(__name__).debug(self.__dict__) - logger(__name__).debug(f'Platform: {self.platform}') + logger(__name__).debug(f"Platform: {self.platform}") logger(__name__).debug(instructions.__dict__) return instructions.perform_upgrade() def __repr__(self): - return repr({ - 'platform': self.platform, - **self.__dict__ - }) + return repr({"platform": self.platform, **self.__dict__}) diff --git a/pros/upgrade/upgrade_manager.py b/pros/upgrade/upgrade_manager.py index 3ddcf8eb..efd39464 100644 --- a/pros/upgrade/upgrade_manager.py +++ b/pros/upgrade/upgrade_manager.py @@ -3,23 +3,24 @@ from enum import Enum from typing import * -from pros.common import logger import pros.common.ui as ui +from pros.common import logger 
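# --- Illustrative sketch, not part of the diff above. It restates the
# platform-detection order used by UpgradeManifestV2.platform: a frozen
# (PyInstaller) build is recognised via a FROZEN_PLATFORM_V1 constant baked in
# at build time, otherwise a pip install is recognised by the presence of a
# pros-cli distribution. The stand-in constant and the use of importlib.metadata
# (instead of the vendored pkg_resources in the code) are assumptions made for
# the example.
import sys


def detect_platform() -> str:
    if getattr(sys, "frozen", False):
        frozen = "Windows64"  # stand-in for _constants.FROZEN_PLATFORM_V1
        for name in ("Windows86", "Windows64", "MacOS"):
            if frozen.startswith(name):
                return name
        return "Unknown"
    try:
        from importlib import metadata  # stdlib lookup, assumed equivalent for this sketch

        metadata.distribution("pros-cli")
        return "Pip"
    except Exception:
        return "Unknown"


print(detect_platform())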
from pros.config import Config from pros.config.cli_config import cli_config -from .manifests import * + from .instructions import UpgradeResult +from .manifests import * class ReleaseChannel(Enum): - Stable = 'stable' - Beta = 'beta' + Stable = "stable" + Beta = "beta" class UpgradeManager(Config): def __init__(self, file=None): if file is None: - file = os.path.join(cli_config().directory, 'upgrade.pros.json') + file = os.path.join(cli_config().directory, "upgrade.pros.json") self._last_check: datetime = datetime.min self._manifest: Optional[UpgradeManifestV1] = None self.release_channel: ReleaseChannel = ReleaseChannel.Stable @@ -29,22 +30,23 @@ def __init__(self, file=None): @property def has_stale_manifest(self): if self._manifest is None: - logger(__name__).debug('Upgrade manager\'s manifest is nonexistent') + logger(__name__).debug("Upgrade manager's manifest is nonexistent") if datetime.now() - self._last_check > cli_config().update_frequency: - logger(__name__).debug(f'Upgrade manager\'s last check occured at {self._last_check}.') - logger(__name__).debug(f'Was longer ago than update frequency ({cli_config().update_frequency}) allows.') + logger(__name__).debug(f"Upgrade manager's last check occured at {self._last_check}.") + logger(__name__).debug(f"Was longer ago than update frequency ({cli_config().update_frequency}) allows.") return (self._manifest is None) or (datetime.now() - self._last_check > cli_config().update_frequency) def get_manifest(self, force: bool = False) -> UpgradeManifestV1: if not force and not self.has_stale_manifest: return self._manifest - ui.echo('Fetching upgrade manifest...') - import requests - import jsonpickle + ui.echo("Fetching upgrade manifest...") import json - channel_url = f'https://purduesigbots.github.io/pros-mainline/{self.release_channel.value}' + import jsonpickle + import requests + + channel_url = f"https://purduesigbots.github.io/pros-mainline/{self.release_channel.value}" self._manifest = None manifest_urls = [f"{channel_url}/{manifest.__name__}.json" for manifest in manifests] @@ -58,13 +60,13 @@ def get_manifest(self, force: bool = False) -> UpgradeManifestV1: self.save() break except json.decoder.JSONDecodeError as e: - logger(__name__).warning(f'Failed to decode {manifest_url}') + logger(__name__).warning(f"Failed to decode {manifest_url}") logger(__name__).debug(e) else: - logger(__name__).debug(f'Failed to get {manifest_url} ({resp.status_code})') + logger(__name__).debug(f"Failed to get {manifest_url} ({resp.status_code})") if not self._manifest: manifest_list = "\n".join(manifest_urls) - logger(__name__).warning(f'Could not access any upgrade manifests from any of:\n{manifest_list}') + logger(__name__).warning(f"Could not access any upgrade manifests from any of:\n{manifest_list}") return self._manifest @property diff --git a/requirements.txt b/requirements.txt index c84eddb0..7b20814b 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,17 +1,19 @@ +cachetools click>=8 -rich-click +cobs +colorama +jsonpickle +observable +pre-commit +pyinstaller +pylint +pypng==0.0.20 pyserial -cachetools +pyzmq requests requests-futures -tabulate -jsonpickle -semantic_version -colorama -pyzmq -cobs +rich-click scan-build==2.0.13 +semantic_version sentry-sdk -observable -pypng==0.0.20 -pyinstaller +tabulate diff --git a/setup.py b/setup.py index f26a9741..6785b4e2 100644 --- a/setup.py +++ b/setup.py @@ -1,22 +1,18 @@ # setup.py for non-frozen builds -from setuptools import setup, find_packages +from setuptools import find_packages, setup 
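# --- Illustrative sketch, not part of the diff above. It mirrors the fallback
# loop in UpgradeManager.get_manifest(): try each manifest format from newest to
# oldest against the release-channel URL, jsonpickle-decode the first successful
# response, and return None if every attempt fails. The channel URL comes from
# the code; the timeout and the simplified error handling are assumptions.
import json

import jsonpickle
import requests


def fetch_manifest(channel: str = "stable"):
    base = f"https://purduesigbots.github.io/pros-mainline/{channel}"
    for name in ("UpgradeManifestV2", "UpgradeManifestV1"):  # newest format first
        url = f"{base}/{name}.json"
        resp = requests.get(url, timeout=10)
        if resp.status_code != 200:
            continue  # this manifest format is not published on the channel
        try:
            return jsonpickle.decode(resp.text)
        except json.decoder.JSONDecodeError:
            continue  # malformed manifest, fall back to the next format
    return None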
+ from install_requires import install_requires as install_reqs setup( - name='pros-cli', - version=open('pip_version').read().strip(), + name="pros-cli", + version=open("pip_version").read().strip(), packages=find_packages(), - url='https://github.com/purduesigbots/pros-cli', - license='MPL-2.0', - author='Purdue ACM SIGBots', - author_email='pros_development@cs.purdue.edu', - description='Command Line Interface for managing PROS projects', + url="https://github.com/purduesigbots/pros-cli", + license="MPL-2.0", + author="Purdue ACM SIGBots", + author_email="pros_development@cs.purdue.edu", + description="Command Line Interface for managing PROS projects", install_requires=install_reqs, - entry_points={ - 'console_scripts': [ - 'pros=pros.cli.main:main', - 'prosv5=pros.cli.main:main' - ] - } + entry_points={"console_scripts": ["pros=pros.cli.main:main", "prosv5=pros.cli.main:main"]}, ) diff --git a/version b/version index e5b82034..1545d966 100644 --- a/version +++ b/version @@ -1 +1 @@ -3.5.0 \ No newline at end of file +3.5.0 diff --git a/version.py b/version.py index 39542079..0523dbdf 100644 --- a/version.py +++ b/version.py @@ -3,33 +3,38 @@ from sys import stdout try: - with open(os.devnull, 'w') as devnull: - v = subprocess.check_output(['git', 'describe', '--tags', '--dirty', '--abbrev'], stderr=stdout).decode().strip() - if '-' in v: - bv = v[:v.index('-')] - bv = bv[:bv.rindex('.') + 1] + str(int(bv[bv.rindex('.') + 1:]) + 1) - sempre = 'dirty' if v.endswith('-dirty') else 'commit' - pippre = 'alpha' if v.endswith('-dirty') else 'pre' - build = subprocess.check_output(['git', 'rev-parse', '--short', 'HEAD']).decode().strip() - number_since = subprocess.check_output( - ['git', 'rev-list', v[:v.index('-')] + '..HEAD', '--count']).decode().strip() - semver = bv + '-' + sempre + '+' + build + with open(os.devnull, "w") as devnull: + v = ( + subprocess.check_output(["git", "describe", "--tags", "--dirty", "--abbrev"], stderr=stdout) + .decode() + .strip() + ) + if "-" in v: + bv = v[: v.index("-")] + bv = bv[: bv.rindex(".") + 1] + str(int(bv[bv.rindex(".") + 1 :]) + 1) + sempre = "dirty" if v.endswith("-dirty") else "commit" + pippre = "alpha" if v.endswith("-dirty") else "pre" + build = subprocess.check_output(["git", "rev-parse", "--short", "HEAD"]).decode().strip() + number_since = ( + subprocess.check_output(["git", "rev-list", v[: v.index("-")] + "..HEAD", "--count"]).decode().strip() + ) + semver = bv + "-" + sempre + "+" + build pipver = bv + pippre + number_since - winver = v[:v.index('-')] + '.' + number_since + winver = v[: v.index("-")] + "." + number_since else: semver = v pipver = v - winver = v + '.0' + winver = v + ".0" - with open('version', 'w') as f: - print('Semantic version is ' + semver) + with open("version", "w") as f: + print("Semantic version is " + semver) f.write(semver) - with open('pip_version', 'w') as f: - print('PIP version is ' + pipver) + with open("pip_version", "w") as f: + print("PIP version is " + pipver) f.write(pipver) - with open('win_version', 'w') as f: - print('Windows version is ' + winver) + with open("win_version", "w") as f: + print("Windows version is " + winver) f.write(winver) except Exception as e: - print('Error calling git') + print("Error calling git") print(e) diff --git a/win_version b/win_version index ec9d2348..5f56c9fd 100644 --- a/win_version +++ b/win_version @@ -1 +1 @@ -3.5.0.0 \ No newline at end of file +3.5.0.0
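# --- Illustrative sketch, not part of the diff above. It works through the
# version-string arithmetic in version.py for one concrete, made-up input: given
# a `git describe --tags` result of "3.5.0-4-gabc1234", the patch number of the
# last tag is bumped, the short commit hash becomes semver build metadata, and
# the commit count since the tag becomes the pip pre-release / Windows build number.
v = "3.5.0-4-gabc1234"  # example `git describe --tags` output (not dirty)
build = "abc1234"       # example `git rev-parse --short HEAD` output
number_since = "4"      # example `git rev-list <tag>..HEAD --count` output

tag = v[: v.index("-")]                                                         # "3.5.0"
bumped = tag[: tag.rindex(".") + 1] + str(int(tag[tag.rindex(".") + 1 :]) + 1)  # "3.5.1"
sempre = "dirty" if v.endswith("-dirty") else "commit"
pippre = "alpha" if v.endswith("-dirty") else "pre"

semver = bumped + "-" + sempre + "+" + build  # "3.5.1-commit+abc1234"
pipver = bumped + pippre + number_since       # "3.5.1pre4"
winver = tag + "." + number_since             # "3.5.0.4"
print(semver, pipver, winver)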