diff --git a/docs/userguides/compile.md b/docs/userguides/compile.md index 2a555f212c..6524b17771 100644 --- a/docs/userguides/compile.md +++ b/docs/userguides/compile.md @@ -54,8 +54,8 @@ By default, Ape ignores files `package.json`, `package-lock.json`, `tsconfig.jso To override this list, edit your `ape-config.yaml` similarly: ```yaml -compiler: - ignore_files: +compile: + exclude: - "*package.json" - "*package-lock.json" - "*tsconfig.json" diff --git a/setup.py b/setup.py index 55c49b8b60..e7a8a8e8ac 100644 --- a/setup.py +++ b/setup.py @@ -123,7 +123,7 @@ "web3[tester]>=6.12.0,<7", # ** Dependencies maintained by ApeWorX ** "eip712>=0.2.3,<0.4", - "ethpm-types>=0.6.2,<0.7", + "ethpm-types>=0.6.3,<0.7", "eth_pydantic_types>=0.1.0a4,<0.2", "evm-trace>=0.1.0", ], diff --git a/src/ape/api/compiler.py b/src/ape/api/compiler.py index d3c00b0e7c..b673dd1a91 100644 --- a/src/ape/api/compiler.py +++ b/src/ape/api/compiler.py @@ -1,6 +1,6 @@ from functools import cached_property from pathlib import Path -from typing import Dict, Iterator, List, Optional, Set, Tuple +from typing import Dict, Iterator, List, Optional, Sequence, Set, Tuple from eth_pydantic_types import HexBytes from ethpm_types import ContractType @@ -54,12 +54,12 @@ def settings(self) -> PluginConfig: return CustomConfig.model_validate(data) @abstractmethod - def get_versions(self, all_paths: List[Path]) -> Set[str]: + def get_versions(self, all_paths: Sequence[Path]) -> Set[str]: """ Retrieve the set of available compiler versions for this plugin to compile ``all_paths``. Args: - all_paths (List[pathlib.Path]): The list of paths. + all_paths (Sequence[pathlib.Path]): The list of paths. Returns: Set[str]: A set of available compiler versions. @@ -67,14 +67,14 @@ def get_versions(self, all_paths: List[Path]) -> Set[str]: @raises_not_implemented def get_compiler_settings( # type: ignore[empty-body] - self, contract_filepaths: List[Path], base_path: Optional[Path] = None + self, contract_filepaths: Sequence[Path], base_path: Optional[Path] = None ) -> Dict[Version, Dict]: """ Get a mapping of the settings that would be used to compile each of the sources by the compiler version number. Args: - contract_filepaths (List[pathlib.Path]): The list of paths. + contract_filepaths (Sequence[pathlib.Path]): The list of paths. base_path (Optional[pathlib.Path]): The contracts folder base path. Returns: @@ -83,13 +83,13 @@ def get_compiler_settings( # type: ignore[empty-body] @abstractmethod def compile( - self, contract_filepaths: List[Path], base_path: Optional[Path] + self, contract_filepaths: Sequence[Path], base_path: Optional[Path] ) -> List[ContractType]: """ Compile the given source files. All compiler plugins must implement this function. Args: - contract_filepaths (List[pathlib.Path]): A list of source file paths to compile. + contract_filepaths (Sequence[pathlib.Path]): A list of source file paths to compile. base_path (Optional[pathlib.Path]): Optionally provide the base path, such as the project ``contracts/`` directory. Defaults to ``None``. When using in a project via ``ape compile``, gets set to the project's ``contracts/`` directory. @@ -122,14 +122,14 @@ def compile_code( # type: ignore[empty-body] @raises_not_implemented def get_imports( # type: ignore[empty-body] - self, contract_filepaths: List[Path], base_path: Optional[Path] + self, contract_filepaths: Sequence[Path], base_path: Optional[Path] ) -> Dict[str, List[str]]: """ Returns a list of imports as source_ids for each contract's source_id in a given compiler. 
Args: - contract_filepaths (List[pathlib.Path]): A list of source file paths to compile. + contract_filepaths (Sequence[pathlib.Path]): A list of source file paths to compile. base_path (Optional[pathlib.Path]): Optionally provide the base path, such as the project ``contracts/`` directory. Defaults to ``None``. When using in a project via ``ape compile``, gets set to the project's ``contracts/`` directory. @@ -141,14 +141,14 @@ def get_imports( # type: ignore[empty-body] @raises_not_implemented def get_version_map( # type: ignore[empty-body] self, - contract_filepaths: List[Path], + contract_filepaths: Sequence[Path], base_path: Optional[Path] = None, ) -> Dict[Version, Set[Path]]: """ Get a map of versions to source paths. Args: - contract_filepaths (List[Path]): Input source paths. Defaults to all source paths + contract_filepaths (Sequence[Path]): Input source paths. Defaults to all source paths per compiler. base_path (Path): The base path of sources. Defaults to the project's ``contracts_folder``. diff --git a/src/ape/api/networks.py b/src/ape/api/networks.py index a24dc0256a..0f0b8c73ac 100644 --- a/src/ape/api/networks.py +++ b/src/ape/api/networks.py @@ -10,6 +10,7 @@ Iterator, List, Optional, + Sequence, Tuple, Type, Union, @@ -362,12 +363,12 @@ def encode_transaction( """ @abstractmethod - def decode_logs(self, logs: List[Dict], *events: EventABI) -> Iterator["ContractLog"]: + def decode_logs(self, logs: Sequence[Dict], *events: EventABI) -> Iterator["ContractLog"]: """ Decode any contract logs that match the given event ABI from the raw log data. Args: - logs (List[Dict]): A list of raw log data from the chain. + logs (Sequence[Dict]): A list of raw log data from the chain. *events (EventABI): Event definitions to decode. Returns: diff --git a/src/ape/api/projects.py b/src/ape/api/projects.py index aa2a7a26b5..5505c8ffd5 100644 --- a/src/ape/api/projects.py +++ b/src/ape/api/projects.py @@ -2,13 +2,12 @@ import re import tempfile from pathlib import Path -from typing import TYPE_CHECKING, Any, Dict, Iterator, List, Optional, Union +from typing import TYPE_CHECKING, Any, Dict, Iterator, List, Optional, Sequence, Union from ethpm_types import Checksum, Compiler, ContractType, PackageManifest, Source from ethpm_types.source import Content -from ethpm_types.utils import Algorithm, compute_checksum from packaging.version import InvalidVersion, Version -from pydantic import AnyUrl +from pydantic import AnyUrl, ValidationError from ape.exceptions import ProjectError from ape.logging import logger @@ -61,15 +60,19 @@ def is_valid(self) -> bool: Useful for figuring out the best ``ProjectAPI`` to use when compiling a project. """ + @property + def manifest(self) -> PackageManifest: + return self.cached_manifest or PackageManifest() + @abstractmethod def create_manifest( - self, file_paths: Optional[List[Path]] = None, use_cache: bool = True + self, file_paths: Optional[Sequence[Path]] = None, use_cache: bool = True ) -> PackageManifest: """ Create a manifest from the project. Args: - file_paths (Optional[List[Path]]): An optional list of paths to compile + file_paths (Optional[Sequence[Path]]): An optional list of paths to compile from this project. use_cache (bool): Set to ``False`` to clear caches and force a re-compile. @@ -115,11 +118,7 @@ def cached_manifest(self) -> Optional[PackageManifest]: # This scenario happens if changing branches and you have some contracts on # one branch and others on the next. 
for name, contract in self.contracts.items(): - source_id = contract.source_id - if not contract.source_id: - continue - - if source_id in (manifest.sources or {}): + if (source_id := contract.source_id) and source_id in (manifest.sources or {}): contracts[name] = contract manifest.contract_types = contracts @@ -151,6 +150,29 @@ def _cache_folder(self) -> Path: folder.mkdir(exist_ok=True, parents=True) return folder + def update_manifest(self, **kwargs) -> PackageManifest: + """ + Add additional package manifest parts to the cache. + + Args: + **kwargs: Fields from ``ethpm_types.manifest.PackageManifest``. + """ + new_manifest = self.manifest.model_copy(update=kwargs) + return self.replace_manifest(new_manifest) + + def replace_manifest(self, manifest: PackageManifest) -> PackageManifest: + """ + Replace the entire cached manifest. + + Args: + manifest (``ethpm_types.manifest.PackageManifest``): The manifest + to use. + """ + self.manifest_cachefile.unlink(missing_ok=True) + self.manifest_cachefile.write_text(manifest.model_dump_json()) + self._cached_manifest = manifest + return manifest + def process_config_file(self, **kwargs) -> bool: """ Process the project's config file. @@ -159,24 +181,112 @@ def process_config_file(self, **kwargs) -> bool: return False - @classmethod - def _create_manifest( - cls, + def add_compiler_data(self, compiler_data: Sequence[Compiler]) -> List[Compiler]: + """ + Add compiler data to the existing cached manifest. + + Args: + compiler_data (List[``ethpm_types.Compiler``]): Compilers to add. + + Returns: + List[``ethpm_types.source.Compiler``]: The full list of compilers. + """ + # Validate given data. + given_compilers = set(compiler_data) + if len(given_compilers) != len(compiler_data): + raise ProjectError( + f"`{self.add_compiler_data.__name__}()` was given multiple of the same compiler. " + "Please filter inputs." + ) + + # Filter out given compilers without contract types. + given_compilers = {c for c in given_compilers if c.contractTypes} + if len(given_compilers) != len(compiler_data): + logger.warning( + f"`{self.add_compiler_data.__name__}()` given compilers without contract types. " + "Ignoring these inputs." + ) + + for given_compiler in given_compilers: + other_given_compilers = [c for c in given_compilers if c != given_compiler] + contract_types_from_others = [ + n for c in other_given_compilers for n in (c.contractTypes or []) + ] + + collisions = { + n for n in (given_compiler.contractTypes or []) if n in contract_types_from_others + } + if collisions: + collide_str = ", ".join(collisions) + raise ProjectError(f"Contract type(s) '{collide_str}' collision across compilers.") + + new_types = [n for c in given_compilers for n in (c.contractTypes or [])] + + # Merge given compilers with existing compilers. + existing_compilers = self.manifest.compilers or [] + + # Existing compilers remaining after processing new compilers. + remaining_existing_compilers: List[Compiler] = [] + + for existing_compiler in existing_compilers: + find_iter = iter(x for x in compiler_data if x == existing_compiler) + + if matching_given_compiler := next(find_iter, None): + # Compiler already exists in the system, possibly with different contract types. + # Merge contract types. 
+                matching_given_compiler.contractTypes = list(
+                    {
+                        *(existing_compiler.contractTypes or []),
+                        *(matching_given_compiler.contractTypes or []),
+                    }
+                )
+                # NOTE: Purposely, we don't add the existing compiler back,
+                # as it is the same as the given compiler (same name,
+                # version, and settings) and we have already
+                # merged their contract types.
+
+                continue
+
+            else:
+                # Filter out contract types added now under a different compiler.
+                existing_compiler.contractTypes = [
+                    c for c in (existing_compiler.contractTypes or []) if c not in new_types
+                ]
+
+                # Remove compilers without contract types.
+                if existing_compiler.contractTypes:
+                    remaining_existing_compilers.append(existing_compiler)
+
+        # Use Compiler.__hash__ to remove duplicates.
+        # Also, sort for consistency.
+        compilers = sorted(
+            list({*remaining_existing_compilers, *compiler_data}),
+            key=lambda x: f"{x.name}@{x.version}",
+        )
+        manifest = self.update_manifest(compilers=compilers)
+        return manifest.compilers or compilers  # Fallback for mypy.
+
+    def update_manifest_sources(
+        self,
         source_paths: List[Path],
         contracts_path: Path,
         contract_types: Dict[str, ContractType],
         name: Optional[str] = None,
         version: Optional[str] = None,
-        initial_manifest: Optional[PackageManifest] = None,
         compiler_data: Optional[List[Compiler]] = None,
+        **kwargs: Any,
     ) -> PackageManifest:
-        manifest = initial_manifest or PackageManifest()
-        manifest.name = name.lower() if name is not None else manifest.name
-        manifest.version = version or manifest.version
-        manifest.sources = cls._create_source_dict(source_paths, contracts_path)
-        manifest.contract_types = contract_types
-        manifest.compilers = compiler_data or []
-        return manifest
+        items: Dict = {
+            "contract_types": contract_types,
+            "sources": self._create_source_dict(source_paths, contracts_path),
+            "compilers": compiler_data or [],
+        }
+        if name is not None:
+            items["name"] = name.lower()
+        if version:
+            items["version"] = version
+
+        return self.update_manifest(**{**items, **kwargs})
 
     @classmethod
     def _create_source_dict(
@@ -204,10 +314,7 @@ def _create_source_dict(
             text = source_path.read_text()
 
             source_dict[key] = Source(
-                checksum=Checksum(
-                    algorithm=Algorithm.MD5,
-                    hash=compute_checksum(source_path.read_bytes()),
-                ),
+                checksum=Checksum.from_file(source_path),
                 urls=[],
                 content=Content(root={i + 1: x for i, x in enumerate(text.splitlines())}),
                 imports=source_imports.get(key, []),
@@ -383,7 +490,7 @@ def compile(self, use_cache: bool = True) -> PackageManifest:
             )
             return compiled_manifest
 
-        self._write_manifest_to_cache(compiled_manifest)
+        self.replace_manifest(compiled_manifest)
         return compiled_manifest
 
     def _extract_local_manifest(
@@ -410,7 +517,7 @@ def _extract_local_manifest(
 
         else:
            # Was given a path to a manifest JSON.
- self._write_manifest_to_cache(manifest) + self.replace_manifest(manifest) return manifest elif (project_path.parent / project_path.name.replace("-", "_")).is_dir(): @@ -433,18 +540,23 @@ def _extract_local_manifest( project = pm.local_project sources = self._get_sources(project) dependencies = self.project_manager._extract_manifest_dependencies() - project_manifest = project._create_manifest( - sources, project.contracts_folder, {}, name=project.name, version=project.version - ) - compiler_data = self.project_manager.get_compiler_data(compile_if_needed=False) - if dependencies: - project_manifest.dependencies = dependencies - if compiler_data: - project_manifest.compilers = compiler_data + extras: Dict = {} + if dependencies: + extras["dependencies"] = dependencies + + project.update_manifest_sources( + sources, + project.contracts_folder, + {}, + name=project.name, + version=project.version, + **extras, + ) - self._write_manifest_to_cache(project_manifest) - return project_manifest + # Replace the dependency's manifest with the temp project's. + self.replace_manifest(project.manifest) + return project.manifest def _get_sources(self, project: ProjectAPI) -> List[Path]: escaped_extensions = [re.escape(ext) for ext in self.compiler_manager.registered_compilers] @@ -458,7 +570,7 @@ def _get_sources(self, project: ProjectAPI) -> List[Path]: return [s for s in all_sources if s not in excluded_files] - def _write_manifest_to_cache(self, manifest: PackageManifest): + def replace_manifest(self, manifest: PackageManifest): self._target_manifest_cache_file.unlink(missing_ok=True) self._target_manifest_cache_file.parent.mkdir(exist_ok=True, parents=True) self._target_manifest_cache_file.write_text(manifest.model_dump_json()) @@ -471,9 +583,10 @@ def _load_manifest_from_file(file_path: Path) -> Optional[PackageManifest]: try: return PackageManifest.model_validate_json(file_path.read_text()) - except Exception as err: - logger.warning(f"Existing manifest file '{file_path}' corrupted. Re-building.") - logger.debug(str(err)) + except ValidationError as err: + logger.warning( + f"Existing manifest file '{file_path}' corrupted (problem={err}). Re-building." 
+ ) return None diff --git a/src/ape/managers/compilers.py b/src/ape/managers/compilers.py index a151e0e05e..be6186cc93 100644 --- a/src/ape/managers/compilers.py +++ b/src/ape/managers/compilers.py @@ -125,9 +125,9 @@ def compile( if x.source_id ] + exclusions = self.config_manager.get_config("compile").exclude for extension in extensions: - path_patterns_to_ignore = self.config_manager.compiler.ignore_files - ignore_path_lists = [contracts_folder.rglob(p) for p in path_patterns_to_ignore] + ignore_path_lists = [contracts_folder.rglob(p) for p in exclusions] paths_to_ignore = [ contracts_folder / get_relative_path(p, contracts_folder) for files in ignore_path_lists diff --git a/src/ape/managers/config.py b/src/ape/managers/config.py index 698f31e99b..7e8a80b962 100644 --- a/src/ape/managers/config.py +++ b/src/ape/managers/config.py @@ -23,11 +23,6 @@ class DeploymentConfig(PluginConfig): contract_type: str -class CompilerConfig(PluginConfig): - ignore_files: List[str] = ["*package.json", "*package-lock.json", "*tsconfig.json"] - """List of globular files to ignore""" - - class DeploymentConfigCollection(RootModel[dict]): @model_validator(mode="before") @classmethod @@ -107,9 +102,6 @@ class ConfigManager(BaseInterfaceModel): meta: PackageMeta = PackageMeta() """Metadata about the project.""" - compiler: CompilerConfig = CompilerConfig() - """Global compiler information.""" - contracts_folder: Path = None # type: ignore """ The path to the project's ``contracts/`` directory @@ -141,11 +133,17 @@ def packages_folder(self) -> Path: self.dependency_manager.packages_folder.mkdir(parents=True, exist_ok=True) return self.dependency_manager.packages_folder + @property + def _project_key(self) -> str: + return self.PROJECT_FOLDER.stem + + @property + def _project_configs(self) -> Dict[str, Any]: + return self._cached_configs.get(self._project_key, {}) + @property def _plugin_configs(self) -> Dict[str, PluginConfig]: - project_name = self.PROJECT_FOLDER.stem - if project_name in self._cached_configs: - cache = self._cached_configs[project_name] + if cache := self._cached_configs.get(self._project_key): self.name = cache.get("name", "") self.version = cache.get("version", "") self.default_ecosystem = cache.get("default_ecosystem", "ethereum") @@ -153,7 +151,6 @@ def _plugin_configs(self) -> Dict[str, PluginConfig]: self.dependencies = cache.get("dependencies", []) self.deployments = cache.get("deployments", {}) self.contracts_folder = cache.get("contracts_folder", self.PROJECT_FOLDER / "contracts") - self.compiler = CompilerConfig.model_validate(cache.get("compiler", {})) return cache # First, load top-level configs. Then, load all the plugin configs. @@ -178,9 +175,6 @@ def _plugin_configs(self) -> Dict[str, PluginConfig]: self.default_ecosystem = configs["default_ecosystem"] = user_config.pop( "default_ecosystem", "ethereum" ) - compiler_dict = user_config.pop("compiler", CompilerConfig().model_dump(mode="json")) - configs["compiler"] = compiler_dict - self.compiler = CompilerConfig(**compiler_dict) dependencies = user_config.pop("dependencies", []) or [] if not isinstance(dependencies, list): @@ -230,7 +224,7 @@ def _plugin_configs(self) -> Dict[str, PluginConfig]: "Plugins may not be installed yet or keys may be mis-spelled." 
) - self._cached_configs[project_name] = configs + self._cached_configs[self._project_key] = configs return configs def __repr__(self): diff --git a/src/ape/managers/project/manager.py b/src/ape/managers/project/manager.py index 84944c0f1e..5deacb8c34 100644 --- a/src/ape/managers/project/manager.py +++ b/src/ape/managers/project/manager.py @@ -131,7 +131,7 @@ def sources_missing(self) -> bool: in the project. ``False`` otherwise. """ - return not self.contracts_folder.is_dir() or not self.contracts_folder.iterdir() + return len(self.source_paths) <= 0 @property def interfaces_folder(self) -> Path: @@ -171,7 +171,7 @@ def compiler_data(self) -> List[Compiler]: """ A list of ``Compiler`` objects representing the raw-data specifics of a compiler. """ - return self._get_compiler_data() + return self.get_compiler_data() def get_compiler_data(self, compile_if_needed: bool = True) -> List[Compiler]: """ @@ -184,14 +184,27 @@ def get_compiler_data(self, compile_if_needed: bool = True) -> List[Compiler]: Returns: List[Compiler] """ - return self._get_compiler_data(compile_if_needed=compile_if_needed) + if compilers := self._get_cached_compiler_data(): + # Compiler data was already in manifest + # (from compiler plugins). + return compilers - def _get_compiler_data(self, compile_if_needed: bool = True): - contract_types: Iterable[ContractType] = ( - self.contracts.values() - if compile_if_needed - else self._get_cached_contract_types().values() - ) + elif compile_if_needed: + return self._derive_settings() + + return [] + + def _get_cached_compiler_data(self) -> List[Compiler]: + if not (cached_manifest := self.local_project.cached_manifest): + return [] + + elif not (compilers := cached_manifest.compilers): + return [] + + return compilers + + def _derive_settings(self) -> List[Compiler]: + contract_types: Iterable[ContractType] = self.load_contracts().values() compiler_list: List[Compiler] = [] contracts_folder = self.config_manager.contracts_folder for ext, compiler in self.compiler_manager.registered_compilers.items(): @@ -223,7 +236,7 @@ def _get_compiler_data(self, compile_if_needed: bool = True): contract_type_names = [ct.name for ct in filtered_contract_types if ct.name] compiler_list.append( Compiler( - name=compiler.name, + name=compiler.name.lower(), version=str(version), settings=version_settings, contractTypes=contract_type_names, @@ -363,12 +376,14 @@ def get_project( ) if not contracts_folder.is_dir(): extensions = list(self.compiler_manager.registered_compilers.keys()) - path_patterns_to_ignore = self.config_manager.compiler.ignore_files - def find_contracts_folder(sub_dir: Path) -> Optional[Path]: + def find_contracts_folder( + sub_dir: Path, exclusions: Optional[List[str]] = None + ) -> Optional[Path]: # Check if config file exists + exclusions = exclusions or [] files_to_ignore = [] - for pattern in path_patterns_to_ignore: + for pattern in exclusions: files_to_ignore.extend(list(sub_dir.glob(pattern))) next_subs = [] @@ -385,13 +400,17 @@ def find_contracts_folder(sub_dir: Path) -> Optional[Path]: # No source was found. Search next level of dirs. 
for next_sub in next_subs: - found = find_contracts_folder(next_sub) - if found: + if found := find_contracts_folder(next_sub, exclusions=exclusions): return found return None - contracts_folder = find_contracts_folder(path) or contracts_folder + if cfg := self.config_manager._project_configs.get("compile"): + excls = cfg.model_dump().get("exclude", []) + else: + excls = [] + + contracts_folder = find_contracts_folder(path, exclusions=excls) or contracts_folder def _try_create_project(proj_cls: Type[ProjectAPI]) -> Optional[ProjectAPI]: with self.config_manager.using_project( @@ -410,8 +429,7 @@ def _try_create_project(proj_cls: Type[ProjectAPI]) -> Optional[ProjectAPI]: project_plugin_types = [pt for pt in self.project_types if not issubclass(pt, ApeProject)] for project_cls in project_plugin_types: - project = _try_create_project(project_cls) - if project: + if project := _try_create_project(project_cls): self._cached_projects[path.name] = project return project @@ -699,12 +717,6 @@ def load_contracts( ) return manifest.contract_types or {} - def _get_cached_contract_types(self) -> Dict[str, ContractType]: - if not self.local_project.cached_manifest: - return {} - - return self.local_project.cached_manifest.contract_types or {} - def load_dependencies(self, use_cache: bool = True) -> Dict[str, Dict[str, DependencyAPI]]: return self.dependency_manager.load_dependencies(self.path.as_posix(), use_cache=use_cache) diff --git a/src/ape/managers/project/types.py b/src/ape/managers/project/types.py index b8544140d1..5a90d81894 100644 --- a/src/ape/managers/project/types.py +++ b/src/ape/managers/project/types.py @@ -1,6 +1,6 @@ import os from pathlib import Path -from typing import Any, Dict, List, Optional +from typing import Any, Dict, List, Optional, Sequence from ethpm_types import ContractType, PackageManifest, Source from ethpm_types.utils import compute_checksum @@ -160,12 +160,11 @@ def process_config_file(self, **kwargs) -> bool: return True def create_manifest( - self, file_paths: Optional[List[Path]] = None, use_cache: bool = True + self, file_paths: Optional[Sequence[Path]] = None, use_cache: bool = True ) -> PackageManifest: # Read the project config and migrate project-settings to Ape settings if needed. compile_config = self.config_manager.get_config("compile") self.project_manager.load_dependencies() - manifest = self._get_base_manifest(use_cache=use_cache) source_paths: List[Path] = list( set( [p for p in self.source_paths if p in file_paths] @@ -177,27 +176,31 @@ def create_manifest( ] ) ) + + manifest = self.manifest if use_cache else PackageManifest() + + # Generate sources and contract types. project_sources = _ProjectSources( manifest, source_paths, self.contracts_folder, self._cache_folder ) contract_types = project_sources.remaining_cached_contract_types compiled_contract_types = self._compile(project_sources) contract_types.update(compiled_contract_types) + # NOTE: We need to prevent compilation or else we get an endless loop, because # compilation results in creating a manifest, which triggers compilation, etc. compiler_data = self.project_manager.get_compiler_data(compile_if_needed=False) - manifest = self._create_manifest( + + # Apply source and contracts to manifest. 
+ self.update_manifest_sources( source_paths, self.contracts_folder, contract_types, - initial_manifest=manifest, name=self.name, version=self.version, compiler_data=compiler_data, ) - # Cache the updated manifest so `self.cached_manifest` reads it next time - self.manifest_cachefile.write_text(manifest.model_dump_json()) - self._cached_manifest = manifest + if compiled_contract_types: for name, contract_type in compiled_contract_types.items(): file = self.project_manager.local_project._cache_folder / f"{name}.json" @@ -205,7 +208,8 @@ def create_manifest( self._contracts = self._contracts or {} self._contracts[name] = contract_type - return manifest + # Is cached. + return self.manifest def _compile(self, project_sources: _ProjectSources) -> Dict[str, ContractType]: def _compile_sources(proj_srcs: _ProjectSources) -> Dict[str, ContractType]: @@ -257,16 +261,6 @@ def _compile_sources(proj_srcs: _ProjectSources) -> Dict[str, ContractType]: # Already in project return _compile_sources(project_sources) - def _get_base_manifest(self, use_cache: bool = True) -> PackageManifest: - if self.cached_manifest and use_cache: - return self.cached_manifest - - manifest = PackageManifest() - if self.manifest_cachefile.is_file(): - self.manifest_cachefile.unlink() - - return manifest - class ApeProject(BaseProject): """ diff --git a/src/ape/utils/basemodel.py b/src/ape/utils/basemodel.py index d9d8c7bd70..4b8c0be665 100644 --- a/src/ape/utils/basemodel.py +++ b/src/ape/utils/basemodel.py @@ -1,5 +1,5 @@ from abc import ABC -from typing import TYPE_CHECKING, Any, ClassVar, Dict, Iterator, List, Optional, Set, Union, cast +from typing import TYPE_CHECKING, Any, ClassVar, Dict, Iterator, List, Optional, Union, cast from ethpm_types import BaseModel as EthpmTypesBaseModel from pydantic import BaseModel as RootBaseModel @@ -22,6 +22,31 @@ from ape.pytest.runners import PytestApeRunner +class _RecursionChecker: + # A helper for preventing the recursion errors + # that happen in custom __getattr__ methods. + + THRESHOLD: int = 10 + getattr_checking: Dict[str, int] = {} + getattr_errors: Dict[str, Exception] = {} + + def check(self, name: str) -> bool: + return (self.getattr_checking.get(name, 0) or 0) >= self.THRESHOLD + + def add(self, name: str): + if name in self.getattr_errors: + self.getattr_checking[name] += 1 + else: + self.getattr_checking[name] = 1 + + def reset(self): + self.getattr_checking = {} + self.getattr_errors = {} + + +_recursion_checker = _RecursionChecker() + + class injected_before_use(property): """ Injected properties are injected class variables that must be set before use. @@ -193,9 +218,6 @@ class BaseModel(EthpmTypesBaseModel): model_config = ConfigDict(arbitrary_types_allowed=True) - __getattr_checking__: Set[str] = set() - __getattr_errors__: Dict[str, Exception] = {} - def __ape_extra_attributes__(self) -> Iterator[ExtraModelAttributes]: """ Override this method to supply extra attributes @@ -214,31 +236,27 @@ def __getattr__(self, name: str) -> Any: account :meth:`~ape.utils.basemodel.BaseModel.__ape_extra_attributes__`. """ - def _clear_from_caches(n): - if n in self.__getattr_checking__: - self.__getattr_checking__.remove(n) - self.__getattr_errors__.pop(n, "") - private_attrs = self.__pydantic_private__ or {} if name in private_attrs: - _clear_from_caches(name) + _recursion_checker.reset() return private_attrs[name] - elif name in self.__getattr_checking__: + elif _recursion_checker.check(name): # Prevent recursive error. # First, attempt to get real error. 
message = f"Failed trying to get {name}" - if real_error := self.__getattr_errors__.get(name): + if real_error := _recursion_checker.getattr_errors.get(name): message = f"{message}. {real_error}" - _clear_from_caches(name) + _recursion_checker.reset() raise AttributeError(message) - self.__getattr_checking__.add(name) + _recursion_checker.add(name) + try: res = super().__getattribute__(name) except AttributeError as err: - self.__getattr_errors__[name] = err + _recursion_checker.getattr_errors[name] = err extras_checked = set() for ape_extra in self.__ape_extra_attributes__(): if not ape_extra.include_getattr: @@ -247,7 +265,7 @@ def _clear_from_caches(n): if name in ape_extra: # Attribute was found in one of the supplied # extra attributes mappings. - _clear_from_caches(name) + _recursion_checker.reset() return ape_extra.get(name) extras_checked.add(ape_extra.name) @@ -255,9 +273,9 @@ def _clear_from_caches(n): # The error message mentions the alternative mappings, # such as a contract-type map. base_err = None - if name in self.__getattr_errors__: + if name in _recursion_checker.getattr_errors: # There was an error getting the value. Show that. - base_err = self.__getattr_errors__[name] + base_err = _recursion_checker.getattr_errors[name] message = str(base_err) else: @@ -266,14 +284,14 @@ def _clear_from_caches(n): extras_str = ", ".join(extras_checked) message = f"{message}. Also checked '{extras_str}'" - _clear_from_caches(name) + _recursion_checker.reset() attr_err = ApeAttributeError(message) if base_err: raise attr_err from base_err else: raise attr_err - _clear_from_caches(name) + _recursion_checker.reset() return res def __getitem__(self, name: Any) -> Any: diff --git a/src/ape_compile/__init__.py b/src/ape_compile/__init__.py index 09e753ceba..c363651d4b 100644 --- a/src/ape_compile/__init__.py +++ b/src/ape_compile/__init__.py @@ -18,7 +18,7 @@ class Config(PluginConfig): should configure ``include_dependencies`` to be ``True``. """ - exclude: List[str] = [] + exclude: List[str] = ["*package.json", "*package-lock.json", "*tsconfig.json"] """ Source exclusion globs across all file types. 
""" diff --git a/src/ape_ethereum/ecosystem.py b/src/ape_ethereum/ecosystem.py index c9b8f347e6..9bca4eec81 100644 --- a/src/ape_ethereum/ecosystem.py +++ b/src/ape_ethereum/ecosystem.py @@ -1,6 +1,6 @@ import re from copy import deepcopy -from typing import Any, Dict, Iterator, List, Optional, Tuple, Type, Union, cast +from typing import Any, Dict, Iterator, List, Optional, Sequence, Tuple, Type, Union, cast from eth_abi import decode, encode from eth_abi.exceptions import InsufficientDataBytes, NonEmptyPaddingBytes @@ -669,7 +669,7 @@ def create_transaction(self, **kwargs) -> TransactionAPI: return txn_class(**kwargs) - def decode_logs(self, logs: List[Dict], *events: EventABI) -> Iterator["ContractLog"]: + def decode_logs(self, logs: Sequence[Dict], *events: EventABI) -> Iterator["ContractLog"]: if not logs: return diff --git a/src/ape_pm/compiler.py b/src/ape_pm/compiler.py index 408917dc26..2c4ed22472 100644 --- a/src/ape_pm/compiler.py +++ b/src/ape_pm/compiler.py @@ -1,6 +1,6 @@ import json from pathlib import Path -from typing import List, Optional, Set +from typing import List, Optional, Sequence, Set from eth_pydantic_types import HexBytes from eth_utils import is_0x_prefixed @@ -17,15 +17,14 @@ class InterfaceCompiler(CompilerAPI): def name(self) -> str: return "ethpm" - def get_versions(self, all_paths: List[Path]) -> Set[str]: + def get_versions(self, all_paths: Sequence[Path]) -> Set[str]: # NOTE: This bypasses the serialization of this compiler into the package manifest's # ``compilers`` field. You should not do this with a real compiler plugin. return set() def compile( - self, filepaths: List[Path], base_path: Optional[Path] = None + self, filepaths: Sequence[Path], base_path: Optional[Path] = None ) -> List[ContractType]: - filepaths.sort() # Sort to assist in reproducing consistent results. 
        contract_types: List[ContractType] = []
        for path in filepaths:
            source_path = (
diff --git a/tests/conftest.py b/tests/conftest.py
index acfc9d8dd5..7d394a4f18 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -228,7 +228,12 @@ def networks_connected_to_tester(eth_tester_provider):
 
 @pytest.fixture
 def geth_provider(networks):
-    if not networks.active_provider or networks.provider.name != "geth":
+    if (
+        not networks.active_provider
+        or networks.provider.name != "geth"
+        or not networks.provider.is_connected
+        or getattr(networks.provider, "uri", "") != GETH_URI
+    ):
         test_acct_100 = "0x63c7f11162dBFC374DC6f5C0B3Aa26C618846a85"
         with networks.ethereum.local.use_provider(
             "geth", provider_settings={"uri": GETH_URI, "extra_funded_accounts": [test_acct_100]}
diff --git a/tests/functional/test_accounts.py b/tests/functional/test_accounts.py
index 548ff137dd..db16660861 100644
--- a/tests/functional/test_accounts.py
+++ b/tests/functional/test_accounts.py
@@ -77,7 +77,7 @@ def test_recover_signer(signer, message):
 
 
 def test_sign_eip712_message(signer):
-    foo = Foo(signer.address)  # type: ignore
+    foo = Foo(signer.address)  # type: ignore[call-arg]
     message = foo.signable_message
     signature = signer.sign_message(message)
     assert signer.check_signature(message, signature)
diff --git a/tests/functional/test_project.py b/tests/functional/test_project.py
index 89dca1f39c..cc5c4b0b6f 100644
--- a/tests/functional/test_project.py
+++ b/tests/functional/test_project.py
@@ -4,6 +4,7 @@
 
 import pytest
 import yaml
+from ethpm_types import Compiler
 from ethpm_types import ContractInstance as EthPMContractInstance
 from ethpm_types import ContractType, Source
 from ethpm_types.manifest import PackageManifest
@@ -329,10 +330,12 @@ def test_track_deployment_from_unknown_contract_given_txn_hash(
     assert actual.runtime_bytecode == contract.contract_type.runtime_bytecode
 
 
-def test_compiler_data(config, project_path, contracts_folder):
-    # See ape-solidity / ape-vyper for better tests
+def test_compiler_data_and_update_cache(config, project_path, contracts_folder):
     with config.using_project(project_path, contracts_folder=contracts_folder) as project:
-        assert not project.compiler_data
+        compiler = Compiler(name="comp", version="1.0.0")
+        project.local_project.update_manifest(compilers=[compiler])
+        assert project.local_project.manifest.compilers == [compiler]
+        assert project.compiler_data == [compiler]
 
 
 def test_get_project_without_contracts_path(project):
@@ -436,3 +439,88 @@ def test_source_paths_excludes_cached_dependencies(project_with_contract):
     shutil.copy(contract, dep_contract)
     actual = project_with_contract.source_paths
     assert dep_contract not in actual
+
+
+def test_update_manifest_compilers(project):
+    compiler = Compiler(name="comp", version="1.0.0", contractTypes=["foo.txt"])
+    project.local_project.update_manifest(compilers=[compiler])
+    actual = project.local_project.manifest.compilers
+    assert actual == [compiler]
+
+    project.local_project.update_manifest(name="test", version="1.0.0")
+    assert project.local_project.manifest.name == "test"
+    assert project.local_project.manifest.version == "1.0.0"
+
+    # The compilers should not have changed.
+    actual = project.local_project.manifest.compilers
+    assert actual == [compiler]
+
+    # Add a new one.
+    # NOTE: `update_manifest()` will override the fields entirely.
+    # You must include existing fields if you want to merge.
+    compiler_2 = Compiler(name="test", version="2.0.0", contractTypes=["bar.txt"])
+    project.local_project.update_manifest(compilers=[compiler_2])
+    actual = project.local_project.manifest.compilers
+    assert actual == [compiler_2]
+
+
+def test_load_contracts(project_with_contract):
+    contracts = project_with_contract.load_contracts()
+    assert len(contracts) > 0
+    assert contracts == project_with_contract.contracts
+
+
+def test_add_compiler_data(project_with_dependency_config):
+    # NOTE: Using a different project than the default to lessen the
+    # chance of race conditions from multi-process test runners.
+    project = project_with_dependency_config
+
+    # Load contracts so that any compilers that may exist are present.
+    project.load_contracts()
+    start_compilers = project.local_project.manifest.compilers or []
+
+    # NOTE: Pre-defining things to lessen chance of race condition.
+    compiler = Compiler(name="comp", version="1.0.0", contractTypes=["foo"])
+    compiler_2 = Compiler(name="test", version="2.0.0", contractTypes=["bar", "stay"])
+
+    # NOTE: Has same contract as compiler 2 and thus replaces the contract.
+    compiler_3 = Compiler(name="test", version="3.0.0", contractTypes=["bar"])
+
+    proj = project.local_project
+    argument = [compiler]
+    second_arg = [compiler_2]
+    third_arg = [compiler_3]
+    first_exp = [*start_compilers, compiler]
+    final_exp = [*first_exp, compiler_2]
+
+    # Add twice to show it's only added once.
+    proj.add_compiler_data(argument)
+    proj.add_compiler_data(argument)
+    assert proj.manifest.compilers == first_exp
+
+    # NOTE: `add_compiler_data()` will not override existing compilers.
+    # Use `update_manifest()` for that.
+    proj.add_compiler_data(second_arg)
+    assert proj.manifest.compilers == final_exp
+
+    proj.add_compiler_data(third_arg)
+    comp = [c for c in proj.manifest.compilers if c.name == "test" and c.version == "2.0.0"][0]
+    assert "bar" not in comp.contractTypes
+
+    # Show that compilers without contract types go away.
+    (compiler_3.contractTypes or []).append("stay")
+    proj.add_compiler_data(third_arg)
+    comp_check = [c for c in proj.manifest.compilers if c.name == "test" and c.version == "2.0.0"]
+    assert not comp_check
+
+    # Show error on multiple of same compiler.
+    compiler_4 = Compiler(name="test123", version="3.0.0", contractTypes=["bar"])
+    compiler_5 = Compiler(name="test123", version="3.0.0", contractTypes=["baz"])
+    with pytest.raises(ProjectError, match=r".*was given multiple of the same compiler.*"):
+        proj.add_compiler_data([compiler_4, compiler_5])
+
+    # Show an error when there is a contract type collision
+    # (this only happens across the given inputs; otherwise the latter replaces the former).
+ compiler_4 = Compiler(name="test321", version="3.0.0", contractTypes=["bar"]) + compiler_5 = Compiler(name="test456", version="9.0.0", contractTypes=["bar"]) + with pytest.raises(ProjectError, match=r".*'bar' collision across compilers.*"): + proj.add_compiler_data([compiler_4, compiler_5]) diff --git a/tests/integration/cli/projects/with-contracts/ape-config.yaml b/tests/integration/cli/projects/with-contracts/ape-config.yaml index e6b077d800..71f741b68d 100644 --- a/tests/integration/cli/projects/with-contracts/ape-config.yaml +++ b/tests/integration/cli/projects/with-contracts/ape-config.yaml @@ -1,7 +1,7 @@ name: withcontracts dependencies: - - name: __FooDep__ + - name: foodep local: ./dep test: diff --git a/tests/integration/cli/test_compile.py b/tests/integration/cli/test_compile.py index f59f745efb..05c0ed35e4 100644 --- a/tests/integration/cli/test_compile.py +++ b/tests/integration/cli/test_compile.py @@ -51,8 +51,8 @@ def test_skip_contracts_and_missing_compilers(ape_cli, runner, project, switch_c # Simulate configuring Ape to not ignore tsconfig.json for some reason. content = """ - compiler: - ignore_files: + compile: + exclude: - "*package.json" """ with switch_config(project, content): @@ -206,6 +206,16 @@ def test_compile_specified_contracts(ape_cli, runner, project, contract_path, cl assert result.exit_code == 0, result.output assert "Compiling 'Interface.json'" in result.output + # Already compiled. + result = runner.invoke(ape_cli, ["compile", contract_path], catch_exceptions=False) + assert result.exit_code == 0, result.output + assert "Compiling 'Interface.json'" not in result.output + + # Force recompile. + result = runner.invoke(ape_cli, ["compile", contract_path, "--force"], catch_exceptions=False) + assert result.exit_code == 0, result.output + assert "Compiling 'Interface.json'" in result.output + @skip_projects_except("multiple-interfaces") def test_compile_unknown_extension_does_not_compile(ape_cli, runner, project, clean_cache): diff --git a/tests/integration/cli/test_pm.py b/tests/integration/cli/test_pm.py index af7d17b4e1..07dfa6b4ec 100644 --- a/tests/integration/cli/test_pm.py +++ b/tests/integration/cli/test_pm.py @@ -14,13 +14,17 @@ def test_install_path_not_exists(ape_cli, runner): @run_once -def test_install_path_to_local_package(ape_cli, runner): - project = "with-contracts" - path = Path(__file__).parent / "projects" / project - result = runner.invoke(ape_cli, ["pm", "install", path.as_posix(), "--name", project]) +def test_install_path_to_local_package(ape_cli, runner, project): + project_name = "with-contracts" + path = Path(__file__).parent / "projects" / project_name + name = path.stem + result = runner.invoke(ape_cli, ["pm", "install", path.as_posix(), "--name", project_name]) assert result.exit_code == 0, result.output assert f"Package '{path.as_posix()}' installed." + # Ensure was installed correctly. + assert (project.dependency_manager.DATA_FOLDER / "packages" / name).is_dir() + @run_once def test_install_path_to_local_config_file(ape_cli, runner): @@ -98,7 +102,7 @@ def test_compile(ape_cli, runner, project): assert result.exit_code == 0, result.output if project.path.as_posix().endswith("with-contracts"): - assert "Package '__FooDep__' compiled." in result.output + assert "Package 'foodep' compiled." in result.output else: # Tests against a bug where we couldn't have hyphens in # dependency project contracts. 
@@ -107,7 +111,7 @@ def test_compile(ape_cli, runner, project): @skip_projects_except("with-contracts") def test_compile_dependency(ape_cli, runner, project): - name = "__FooDep__" + name = "foodep" result = runner.invoke(ape_cli, ["pm", "compile", name]) assert result.exit_code == 0, result.output assert f"Package '{name}' compiled." in result.output @@ -188,11 +192,16 @@ def test_remove_cancel(ape_cli, runner): @skip_projects_except("only-dependencies") -def test_remove_invalid_version(ape_cli, runner): +def test_remove_invalid_version(ape_cli, runner, project): + package_name = "dependency-in-project-only" + # Install packages runner.invoke(ape_cli, ["pm", "install", ".", "--force"]) - package_name = "dependency-in-project-only" + # Ensure was installed correctly. + assert package_name in project.dependencies + assert (project.dependency_manager.DATA_FOLDER / "packages" / package_name).is_dir() + invalid_version = "0.0.0" result = runner.invoke(ape_cli, ["pm", "remove", package_name, invalid_version])
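For downstream projects, the practical effect of the config rename in `docs/userguides/compile.md` and `ape_compile.Config` is that exclusion globs now live under the `compile.exclude` key instead of `compiler.ignore_files`. The sketch below illustrates how such globs get matched against a contracts folder; it mirrors the `rglob` loop in `CompilerManager.compile()`, but `DEFAULT_EXCLUDES` and `excluded_paths` are illustrative names invented here, not part of Ape's API.

```python
from pathlib import Path
from typing import List

# Mirrors the new default of `ape_compile.Config.exclude`.
DEFAULT_EXCLUDES = ["*package.json", "*package-lock.json", "*tsconfig.json"]


def excluded_paths(contracts_folder: Path, exclusions: List[str]) -> List[Path]:
    """Collect every file under ``contracts_folder`` matching an exclusion glob."""
    # Each pattern is applied recursively, the same way the compiler manager
    # builds its ignore list before compiling.
    return [path for pattern in exclusions for path in contracts_folder.rglob(pattern)]


if __name__ == "__main__":
    for path in excluded_paths(Path("contracts"), DEFAULT_EXCLUDES):
        print(f"excluded: {path}")
```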
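The new `ProjectAPI.update_manifest()`, `replace_manifest()`, and `add_compiler_data()` methods replace the old `_create_manifest()`/`_write_manifest_to_cache()` pair. A minimal usage sketch, modeled on the calls in `tests/functional/test_project.py`, follows; the compiler values and the `record_compiler_data` helper are made up for illustration, and it assumes it runs from inside an Ape project so that `ape.project` resolves to the active project manager.

```python
from ethpm_types import Compiler

from ape import project  # The active ProjectManager for the current directory.


def record_compiler_data() -> None:
    # `add_compiler_data()` merges new compiler entries into the cached
    # manifest and de-duplicates them; it does not drop existing entries.
    vyper = Compiler(name="vyper", version="0.3.10", contractTypes=["MyContract"])
    project.local_project.add_compiler_data([vyper])

    # `update_manifest()` overwrites the given fields wholesale, so include
    # existing values in the call if a merge is intended.
    project.local_project.update_manifest(name="my-package", version="0.1.0")

    print(project.local_project.manifest.compilers)


if __name__ == "__main__":
    record_compiler_data()
```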
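The module-level `_RecursionChecker` added in `src/ape/utils/basemodel.py` exists because a custom `__getattr__` that falls back to other lookups can re-enter itself on the same missing name. A stripped-down sketch of that guard pattern (standalone names, not Ape's internals) shows how the counter-and-threshold approach breaks the loop:

```python
from typing import Any, Dict


class RecursionGuard:
    """Counts repeated lookups of one attribute name so a custom __getattr__ can bail out."""

    THRESHOLD = 10

    def __init__(self) -> None:
        self.counts: Dict[str, int] = {}

    def check(self, name: str) -> bool:
        return self.counts.get(name, 0) >= self.THRESHOLD

    def add(self, name: str) -> None:
        self.counts[name] = self.counts.get(name, 0) + 1

    def reset(self) -> None:
        self.counts.clear()


_guard = RecursionGuard()


class Model:
    def __getattr__(self, name: str) -> Any:
        # Only called when normal attribute lookup fails.
        if _guard.check(name):
            _guard.reset()
            raise AttributeError(f"Failed trying to get '{name}' (recursion stopped)")

        _guard.add(name)
        # A fallback that triggers __getattr__ again for the same missing name;
        # without the guard this would recurse until the interpreter's limit.
        return getattr(self, name)


if __name__ == "__main__":
    try:
        Model().missing
    except AttributeError as err:
        print(err)
```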