From 0bf118110093370d1c6f765bbba586ee7fd6671f Mon Sep 17 00:00:00 2001 From: Piper Merriam Date: Thu, 17 Nov 2016 12:37:46 -0700 Subject: [PATCH 1/2] Packaging implementation --- conftest.py | 91 +++ docs/index.rst | 1 + docs/packaging.quickstart.rst | 131 ++++ docs/packaging.rst | 15 + docs/tutorial.part-3.rst | 269 +++++++ docs/tutorial.part-4.rst | 159 +++++ docs/tutorial.rst | 2 + populus/assets/config.v3.schema.json | 26 +- populus/assets/config.v4.schema.json | 60 +- populus/assets/defaults.v4.config.json | 49 ++ populus/assets/file.proto | 16 + populus/assets/node.proto | 27 + populus/assets/package_index_abi.json | 622 ++++++++++++++++ .../assets/release-lockfile-v1.schema.json | 279 ++++++++ populus/chain/__init__.py | 6 - populus/cli/__init__.py | 1 + populus/cli/chain_cmd.py | 2 +- populus/cli/package_cmd.py | 317 +++++++++ populus/compilation/__init__.py | 52 +- populus/config/upgrade/v3.py | 9 +- .../contracts/backends/installed_packages.py | 139 ++++ populus/contracts/contract.py | 5 + populus/packages/__init__.py | 0 populus/packages/backends/__init__.py | 0 populus/packages/backends/base.py | 84 +++ populus/packages/backends/index.py | 211 ++++++ populus/packages/backends/ipfs.py | 117 +++ populus/packages/backends/lockfile.py | 38 + populus/packages/backends/manifest.py | 26 + populus/packages/build.py | 217 ++++++ populus/packages/exceptions.py | 10 + populus/packages/installation.py | 192 +++++ populus/pb/__init__.py | 0 populus/pb/ipfs_file_pb2.py | 228 ++++++ populus/project.py | 105 ++- populus/utils/cli.py | 15 +- populus/utils/compile.py | 157 +++- populus/utils/contracts.py | 41 ++ populus/utils/dependencies.py | 196 +++++ populus/utils/exception/__init__.py | 10 + populus/utils/exception/exception_py2.py | 5 + populus/utils/exception/exception_py3.py | 2 + populus/utils/functional.py | 13 + populus/utils/ipfs.py | 153 ++++ populus/utils/packaging.py | 670 ++++++++++++++++++ populus/utils/testing.py | 9 + setup.py | 5 +- tests/cli/test_deploy_cmd.py | 3 - .../test_solc_standard_json_backend.py | 62 ++ .../test_get_contract_factory.py | 16 +- .../test_installed_packages_backend.py | 139 ++++ .../test_is_dependency_contract_name.py | 19 + ...test_build_dependency_namespace_lookups.py | 62 ++ ...ive_find_installed_dependency_base_dirs.py | 35 + tests/example-packages/escrow/1.0.0.json | 127 ++++ .../escrow/contracts/Escrow.sol | 31 + .../escrow/contracts/SafeSendLib.sol | 17 + tests/example-packages/owned/1.0.0.json | 21 + .../owned/contracts/owned.sol | 11 + tests/example-packages/piper-coin/1.0.0.json | 24 + .../example-packages/safe-math-lib/1.0.0.json | 79 +++ .../safe-math-lib/contracts/SafeMathLib.sol | 28 + .../standard-token/1.0.0.json | 55 ++ .../contracts/AbstractToken.sol | 19 + .../contracts/StandardToken.sol | 83 +++ .../example-packages/transferable/1.0.0.json | 21 + .../transferable/contracts/transferable.sol | 13 + tests/example-packages/wallet/1.0.0.json | 100 +++ .../wallet/contracts/Wallet.sol | 39 + tests/fixtures/UsesOwned.sol | 9 + tests/fixtures/UsesSafeMathLib.sol | 12 + tests/fixtures/UsesStandardToken.sol | 7 + tests/functional-utils/test_star_apply.py | 11 + tests/ipfs-utils/test_create_ipfs_uri.py | 15 + .../test_extract_ipfs_path_from_uri.py | 62 ++ .../test_generate_ipfs_multihash.py | 19 + tests/ipfs-utils/test_is_ipfs_uri.py | 34 + tests/packaging-utils/conftest.py | 186 +++++ .../test_compute_identifier_tree.py | 165 +++++ .../test_construct_build_deployment_data.py | 79 +++ .../test_construct_contract_type_object.py | 59 ++ 
.../test_construct_dependency_identifier.py | 21 + .../test_construct_package_identifier.py | 24 + .../test_construct_release_lockfile.py | 65 ++ ...dependency_name_from_identifier_lineage.py | 44 ++ .../test_install_packages_to_project.py | 104 +++ .../test_package_identifier_helpers.py | 227 ++++++ ...st_recursive_resolution_of_package_data.py | 197 +++++ .../test_release_lockfile_validation.py | 105 +++ .../test_write_installed_packages.py | 163 +++++ tests/project/test_get_chain.py | 4 - 91 files changed, 7343 insertions(+), 55 deletions(-) create mode 100644 docs/packaging.quickstart.rst create mode 100644 docs/packaging.rst create mode 100644 docs/tutorial.part-3.rst create mode 100644 docs/tutorial.part-4.rst create mode 100644 populus/assets/file.proto create mode 100644 populus/assets/node.proto create mode 100644 populus/assets/package_index_abi.json create mode 100644 populus/assets/release-lockfile-v1.schema.json create mode 100644 populus/cli/package_cmd.py create mode 100644 populus/contracts/backends/installed_packages.py create mode 100644 populus/packages/__init__.py create mode 100644 populus/packages/backends/__init__.py create mode 100644 populus/packages/backends/base.py create mode 100644 populus/packages/backends/index.py create mode 100644 populus/packages/backends/ipfs.py create mode 100644 populus/packages/backends/lockfile.py create mode 100644 populus/packages/backends/manifest.py create mode 100644 populus/packages/build.py create mode 100644 populus/packages/exceptions.py create mode 100644 populus/packages/installation.py create mode 100644 populus/pb/__init__.py create mode 100644 populus/pb/ipfs_file_pb2.py create mode 100644 populus/utils/dependencies.py create mode 100644 populus/utils/exception/__init__.py create mode 100644 populus/utils/exception/exception_py2.py create mode 100644 populus/utils/exception/exception_py3.py create mode 100644 populus/utils/ipfs.py create mode 100644 populus/utils/packaging.py create mode 100644 tests/contract-provider/test_installed_packages_backend.py create mode 100644 tests/contracts-utils/test_is_dependency_contract_name.py create mode 100644 tests/dependencies-utils/test_build_dependency_namespace_lookups.py create mode 100644 tests/dependencies-utils/test_recursive_find_installed_dependency_base_dirs.py create mode 100644 tests/example-packages/escrow/1.0.0.json create mode 100644 tests/example-packages/escrow/contracts/Escrow.sol create mode 100644 tests/example-packages/escrow/contracts/SafeSendLib.sol create mode 100644 tests/example-packages/owned/1.0.0.json create mode 100644 tests/example-packages/owned/contracts/owned.sol create mode 100644 tests/example-packages/piper-coin/1.0.0.json create mode 100644 tests/example-packages/safe-math-lib/1.0.0.json create mode 100644 tests/example-packages/safe-math-lib/contracts/SafeMathLib.sol create mode 100644 tests/example-packages/standard-token/1.0.0.json create mode 100644 tests/example-packages/standard-token/contracts/AbstractToken.sol create mode 100644 tests/example-packages/standard-token/contracts/StandardToken.sol create mode 100644 tests/example-packages/transferable/1.0.0.json create mode 100644 tests/example-packages/transferable/contracts/transferable.sol create mode 100644 tests/example-packages/wallet/1.0.0.json create mode 100644 tests/example-packages/wallet/contracts/Wallet.sol create mode 100644 tests/fixtures/UsesOwned.sol create mode 100644 tests/fixtures/UsesSafeMathLib.sol create mode 100644 tests/fixtures/UsesStandardToken.sol create mode 
100644 tests/functional-utils/test_star_apply.py create mode 100644 tests/ipfs-utils/test_create_ipfs_uri.py create mode 100644 tests/ipfs-utils/test_extract_ipfs_path_from_uri.py create mode 100644 tests/ipfs-utils/test_generate_ipfs_multihash.py create mode 100644 tests/ipfs-utils/test_is_ipfs_uri.py create mode 100644 tests/packaging-utils/conftest.py create mode 100644 tests/packaging-utils/test_compute_identifier_tree.py create mode 100644 tests/packaging-utils/test_construct_build_deployment_data.py create mode 100644 tests/packaging-utils/test_construct_contract_type_object.py create mode 100644 tests/packaging-utils/test_construct_dependency_identifier.py create mode 100644 tests/packaging-utils/test_construct_package_identifier.py create mode 100644 tests/packaging-utils/test_construct_release_lockfile.py create mode 100644 tests/packaging-utils/test_extract_dependency_name_from_identifier_lineage.py create mode 100644 tests/packaging-utils/test_install_packages_to_project.py create mode 100644 tests/packaging-utils/test_package_identifier_helpers.py create mode 100644 tests/packaging-utils/test_recursive_resolution_of_package_data.py create mode 100644 tests/packaging-utils/test_release_lockfile_validation.py create mode 100644 tests/packaging-utils/test_write_installed_packages.py diff --git a/conftest.py b/conftest.py index fb661874..4f1258ee 100644 --- a/conftest.py +++ b/conftest.py @@ -167,6 +167,89 @@ def _loaded_test_contract_fixtures(project_dir, request): shutil.copy(src_path, dst_path) +EXAMPLE_PACKAGES_BASE_PATH = './tests/example-packages' + + +@pytest.fixture() +def _loaded_installed_dependencies(populus_source_root, project_dir, request): + from populus.utils.dependencies import ( + get_installed_packages_dir, + ) + from populus.utils.filesystem import ( + find_solidity_source_files, + ) + from populus.utils.packaging import ( + load_release_lockfile, + extract_package_metadata, + ) + from populus.utils.ipfs import ( + generate_file_hash, + ) + from populus.packages.installation import ( + write_installed_packages, + ) + + packages_to_load_from_fn = getattr(request.function, '_populus_packages_to_load', []) + packages_to_load_from_module = getattr(request.module, '_populus_packages_to_load', []) + + packages_to_load = itertools.chain( + packages_to_load_from_fn, + packages_to_load_from_module, + ) + + def load_example_package_data(example_package_name): + example_package_dir = os.path.join( + populus_source_root, + EXAMPLE_PACKAGES_BASE_PATH, + example_package_name, + ) + + if not os.path.exists(example_package_dir): + raise ValueError( + "Unable to load example package '{0}".format(example_package_name) + ) + + release_lockfile_path = os.path.join(example_package_dir, '1.0.0.json') + release_lockfile_uri = generate_file_hash(release_lockfile_path) + release_lockfile = load_release_lockfile(release_lockfile_path) + source_file_paths = find_solidity_source_files(example_package_dir) + source_tree = { + os.path.relpath(source_file_path, example_package_dir): open(source_file_path).read() + for source_file_path + in source_file_paths + } + package_meta = extract_package_metadata( + [ + example_package_name, + "{0}==1.0.0".format(example_package_name), + release_lockfile_uri, + ], + release_lockfile, + ) + package_dependencies = tuple( + load_example_package_data(dependency_name) + for dependency_name + in release_lockfile.get('build_dependencies', {}).keys() + ) + + package_data = { + 'meta': package_meta, + 'lockfile': release_lockfile, + 'source_tree': source_tree, + 
'dependencies': package_dependencies, + } + return package_data + + installed_packages_dir = get_installed_packages_dir(project_dir) + + package_data_to_install = tuple( + load_example_package_data(item) + for item + in packages_to_load + ) + write_installed_packages(installed_packages_dir, package_data_to_install) + + @pytest.fixture() def _updated_project_config(project_dir, request): key_value_pairs_from_fn = getattr(request.function, '_populus_config_key_value_pairs', []) @@ -189,6 +272,14 @@ def _updated_project_config(project_dir, request): project.write_config() +@pytest.fixture() +def project(project_dir, + _loaded_contract_fixtures, + _loaded_test_contract_fixtures, + _loaded_installed_dependencies): + return Project() + + def pytest_fixture_setup(fixturedef, request): """ Injects the following fixtures ahead of the `project` fixture. diff --git a/docs/index.rst b/docs/index.rst index e4188c79..67e0fa27 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -19,6 +19,7 @@ Contents project config chain + packaging release modules API Documentation diff --git a/docs/packaging.quickstart.rst b/docs/packaging.quickstart.rst new file mode 100644 index 00000000..b5590668 --- /dev/null +++ b/docs/packaging.quickstart.rst @@ -0,0 +1,131 @@ +Packaging Quickstart +==================== + +.. contents:: :local: + +Introduction +------------ + +Populus can be used as a package manager to interact with any ERC190 smart +contract packages. + + +Project Manifest +---------------- + +In order to take advantage of the packaging features you will first need to +create a package manifest for your project. This can either be done manually +or using the command line helper ``$ populus package init`` which will present +an interactive prompt for creating the ``ethpm.json`` file. + +.. code-block:: bash + + $ populus package init + Writing new ethpm.json file. + Package Name: fancy-greeter + Author(s) [[]]: Piper Merriam + Version [1.0.0]: + License [MIT]: + Description []: A fancy greeter contract + Keywords [[]]: greeter, greetings + Links [{}]: + Wrote package manifest: ethpm.json + + +Installing Packages +------------------- + +Packages can be installed using the ``$populus package install`` command. +Packages may be specified in the following formats. + +* ``populus package install .``: + + To install all of the declared dependencies found within the project's package manifest. + +* ``populus package install some-package-name`` + + To install a named package ``some-package-name`` sourced from a package index. + +* ``populus package install ipfs://QmUwVUMVtkVctrLDeL12SoeCPUacELBU8nAxRtHUzvtjND`` + + To install directly from a release lockfile via IPFS + +* ``populus package install /path/to/release-lockfile.json`` + + To install directly from a release lockfile on the local filesystem. + + +Populus also supports installing packages under aliased names. This can be +used to allow multiple versions of the same package to be installed in tandem. + +* ``populus package install some-alias:some-package-name`` + + To install a named package ``some-package-name`` under the name + ``some-alias`` sourced from a package index. + +* ``populus package install some-alias@ipfs://QmUwVUMVtkVctrLDeL12SoeCPUacELBU8nAxRtHUzvtjND`` + + To install directly from a release lockfile via IPFS using the name ``some-alias``. 
+
+* ``populus package install some-alias@/path/to/release-lockfile.json``
+
+  To install directly from a release lockfile on the local filesystem using
+  the name ``some-alias``.
+
+
+Packages are installed in the ``./installed_packages`` directory in the root
+project directory under their aliased name, or their package name if no alias
+is used.
+
+When a package is installed it is automatically saved to the project
+dependencies within the package manifest. This can be disabled by passing in
+the ``--no-save`` flag during installation.
+
+
+Using Contracts from Installed Packages
+---------------------------------------
+
+Importing a contract from an installed package is done by prefixing the source
+path with the name of the installed package, or the alias name if an alias was
+used.
+
+Let's use the common *owned* pattern for an example. Suppose we have the
+``owned`` package installed in our project. We know that this package has a
+single solidity source file that contains the ``owned`` contract located at
+``./contracts/owned.sol``.
+
+To import a contract from this file into local solidity source files you would
+simply prefix the import path with the package name.
+
+.. code-block:: solidity
+
+    pragma solidity ^0.4.0;
+
+    import "owned/contracts/owned.sol";
+
+    contract MyContract is owned {
+        ...
+    }
+
+.. note::
+
+   If you install a package which either has source files which do not compile
+   with the solidity compiler version you are using, or which have a ``pragma
+   solidity`` statement which is incompatible with your version of solidity,
+   then compilation will fail.
+
+
+Library Linking
+---------------
+
+If you have a package installed which contains a library contract with a deployed instance of that library, populus will automatically find and link against that existing deployed library. One of the default contract backends that populus uses will check all installed packages for a matching deployed library instance when linking your contract bytecode.
+
+
+
+Building and Publishing Releases
+--------------------------------
+
+Populus can be used to build and publish packages to The Ethereum Package
+Registry or any registry which implements a compatible API.
+
+To build a release, use the ``$ populus package build`` command.
diff --git a/docs/packaging.rst b/docs/packaging.rst
new file mode 100644
index 00000000..d9cd707d
--- /dev/null
+++ b/docs/packaging.rst
@@ -0,0 +1,15 @@
+Packaging
+=========
+
+.. contents:: :local:
+
+.. warning:: The packaging functionality is highly experimental. All APIs are subject to change without notice.
+
+
+Contents
+--------
+
+.. toctree::
+    :maxdepth: 1
+
+    packaging.quickstart
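
Editor's note: the quickstart above leans on the release lockfile format that this patch adds a JSON schema for (``populus/assets/release-lockfile-v1.schema.json``) and validates through ``validate_release_lockfile`` in ``populus/utils/packaging.py``. As a rough, standalone illustration of what that validation amounts to, the sketch below checks one of the example lockfiles against the bundled schema with the ``jsonschema`` library; the two file paths are assumptions made for the example, and populus's own helper may load and wrap the schema differently.

.. code-block:: python

    import json

    import jsonschema

    # Assumed paths, relative to a populus checkout; adjust to your layout.
    SCHEMA_PATH = "populus/assets/release-lockfile-v1.schema.json"
    LOCKFILE_PATH = "tests/example-packages/owned/1.0.0.json"

    with open(SCHEMA_PATH) as schema_file:
        schema = json.load(schema_file)

    with open(LOCKFILE_PATH) as lockfile_file:
        release_lockfile = json.load(lockfile_file)

    try:
        # jsonschema.validate raises ValidationError on the first failure.
        jsonschema.validate(release_lockfile, schema)
    except jsonschema.exceptions.ValidationError as error:
        print("Invalid release lockfile: {0}".format(error.message))
    else:
        print("{0}=={1} passes the v1 lockfile schema".format(
            release_lockfile["package_name"],
            release_lockfile["version"],
        ))
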
diff --git a/docs/tutorial.part-3.rst b/docs/tutorial.part-3.rst
new file mode 100644
index 00000000..fc88d507
--- /dev/null
+++ b/docs/tutorial.part-3.rst
@@ -0,0 +1,269 @@
+Part 3: Installing a Package
+============================
+
+.. contents:: :local:
+
+
+Introduction
+------------
+
+In this tutorial we will be creating our own mintable `ERC20`_ token. However,
+instead of writing our own ERC20 implementation we'll be taking advantage of an
+existing implementation through the use of populus's package management
+features.
+
+
+Setting up the project folder
+-----------------------------
+
+
+Create a new directory for your project and run ``$ populus init`` to
+populate the initial project structure.
+
+.. code-block:: bash
+
+    $ populus init
+    Wrote default populus configuration to `./populus.json`.
+    Created Directory: ./contracts
+    Created Example Contract: ./contracts/Greeter.sol
+    Created Directory: ./tests
+    Created Example Tests: ./tests/test_greeter.py
+
+Now, delete the ``./contracts/Greeter.sol`` and ``./tests/test_greeter.py``
+files as we won't be using the *Greeter* contracts in this tutorial.
+
+Once you've removed those files, create a new solidity source file
+``./contracts/MintableToken.sol`` and paste in the following solidity source
+code.
+
+.. code-block:: solidity
+
+    pragma solidity ^0.4.0;
+
+    import {owned} from "example-package-owned/contracts/owned.sol";
+    import {StandardToken} from "example-package-standard-token/contracts/StandardToken.sol";
+
+    contract MintableToken is StandardToken(0), owned {
+        function mint(address who, uint value) public onlyowner returns (bool) {
+            balances[who] += value;
+            totalSupply += value;
+            Transfer(0x0, who, value);
+            return true;
+        }
+    }
+
+If you are familiar with solidity, the two import statements should stand out
+to you. These two statements will currently cause an error during compilation.
+Let's see.
+
+.. code-block:: bash
+
+    $ populus compile
+    ============ Compiling ==============
+    > Loading source files from: ./contracts
+
+    Traceback (most recent call last):
+    ...
+    > command: `solc --optimize --combined-json bin,bin-runtime,abi,devdoc,userdoc contracts/MintableToken.sol`
+    > return code: `1`
+    > stderr:
+
+    > stdout:
+    contracts/MintableToken.sol:3:1: Error: Source "example-package-owned/contracts/owned.sol" not found: File not found.
+    import {owned} from "example-package-owned/contracts/owned.sol";
+    ^--------------------------------------------------------------^
+    contracts/MintableToken.sol:4:1: Error: Source "example-package-standard-token/contracts/StandardToken.sol" not found: File not found.
+    import {StandardToken} from "example-package-standard-token/contracts/StandardToken.sol";
+    ^---------------------------------------------------------------------------------------^
+
+The solidity compiler clearly gets angry that we're trying to import files that
+don't exist. In order to install these files and make solidity happy we'll
+first need to generate a package manifest using the ``$ populus package init``
+command.
+
+.. code-block:: bash
+
+    $ populus package init
+    Writing new ethpm.json file.
+    Package Name: mintable-standard-token
+    Author(s) [[]]: Piper Merriam
+    Version [1.0.0]:
+    License [MIT]:
+    Description []: Mintable ERC20 token contract
+    Keywords [[]]: ERC20, tokens
+    Links [{}]:
+    Wrote package manifest: ethpm.json
+
+You will be presented with an interactive prompt to provide various pieces of
+project information. There will now be a new file in the root of your project
+named ``ethpm.json`` that should look something like this.
+
+.. code-block:: javascript
+
+    {
+      "authors": [
+        "Piper Merriam"
+      ],
+      "description": "Mintable ERC20 token contract",
+      "keywords": [
+        "ERC20",
+        "tokens"
+      ],
+      "license": "MIT",
+      "links": {},
+      "manifest_version": "1",
+      "package_name": "mintable-standard-token",
+      "version": "1.0.0"
+    }
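
Editor's note: if you want to sanity-check the manifest from a script rather than by re-running the interactive prompt, a minimal check along the following lines works. The set of required fields shown here is an assumption based on the example above, not the exact rules enforced by populus's ``validate_package_manifest`` helper.

.. code-block:: python

    import json

    # Fields this sketch treats as required; populus's own validation may
    # check more (or different) keys.
    EXPECTED_FIELDS = {"package_name", "version", "manifest_version"}

    with open("ethpm.json") as manifest_file:
        package_manifest = json.load(manifest_file)

    missing_fields = EXPECTED_FIELDS - set(package_manifest)
    if missing_fields:
        raise ValueError(
            "ethpm.json is missing fields: {0}".format(", ".join(sorted(missing_fields)))
        )

    if package_manifest["manifest_version"] != "1":
        raise ValueError("This sketch only understands manifest_version '1'")

    print("Manifest OK: {0} {1}".format(
        package_manifest["package_name"],
        package_manifest["version"],
    ))
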
+
+Now we are ready to install some dependencies using the ``$ populus package
+install`` command. We want to install both the ``example-package-owned`` and
+``example-package-standard-token`` packages.
+
+.. code-block:: bash
+
+    $ populus package install example-package-owned example-package-standard-token
+    Installed Packages: owned, standard-token
+
+If you look in your project directory you should also see a new folder
+``./installed_packages``.
+
+.. code-block:: bash
+
+    $ tree .
+    .
+    ├── contracts
+    │   └── MintableToken.sol
+    ├── ethpm.json
+    ├── installed_packages
+    │   ├── example-package-owned
+    │   │   ├── build_identifier.lock
+    │   │   ├── contracts
+    │   │   │   └── owned.sol
+    │   │   ├── install_identifier.lock
+    │   │   ├── installed_packages
+    │   │   └── lock.json
+    │   └── example-package-standard-token
+    │       ├── build_identifier.lock
+    │       ├── contracts
+    │       │   ├── AbstractToken.sol
+    │       │   └── StandardToken.sol
+    │       ├── install_identifier.lock
+    │       ├── installed_packages
+    │       └── lock.json
+    ├── populus.json
+    └── tests
+
+    9 directories, 12 files
+
+
+And if you look in your ``ethpm.json`` file you should see two dependencies.
+
+
+.. code-block:: javascript
+
+    {
+      "authors": [
+        "Piper Merriam"
+      ],
+      "dependencies": {
+        "example-package-owned": "1.0.0",
+        "example-package-standard-token": "1.0.0"
+      },
+      "description": "Mintable ERC20 token contract",
+      "keywords": [
+        "ERC20",
+        "tokens"
+      ],
+      "license": "MIT",
+      "links": {},
+      "manifest_version": "1",
+      "package_name": "mintable-standard-token",
+      "version": "1.0.0"
+    }
+
+Now, we can try to compile our project again and everything should work.
+
+
+.. code-block:: bash
+
+    $ populus compile
+    ============ Compiling ==============
+    > Loading source files from: ./contracts
+
+    > Found 1 contract source files
+    - contracts/MintableToken.sol
+
+    > Compiled 4 contracts
+    - MintableToken
+    - StandardToken
+    - Token
+    - owned
+
+    > Wrote compiled assets to: ./build/contracts.json
+
+Let's go ahead and write a quick test for our new minting functionality. Add
+the following test code to a new file ``./tests/test_token_minting.py``.
+
+.. code-block:: python
+
+    import pytest
+
+    def test_minting_tokens(chain, accounts):
+        provider = chain.provider
+        mintable_token, deploy_txn_hash = provider.get_or_deploy_contract(
+            'MintableToken',
+            deploy_kwargs={"_totalSupply": 0},
+        )
+
+        assert mintable_token.call().balanceOf(accounts[0]) == 0
+        assert mintable_token.call().balanceOf(accounts[1]) == 0
+        assert mintable_token.call().totalSupply() == 0
+
+        chain.wait.for_receipt(mintable_token.transact().mint(
+            who=accounts[0],
+            value=12345,
+        ))
+        chain.wait.for_receipt(mintable_token.transact().mint(
+            who=accounts[1],
+            value=54321,
+        ))
+
+        assert mintable_token.call().balanceOf(accounts[0]) == 12345
+        assert mintable_token.call().balanceOf(accounts[1]) == 54321
+        assert mintable_token.call().totalSupply() == 66666
+
+    def test_only_owner_can_mint(chain, accounts):
+        provider = chain.provider
+        mintable_token, deploy_txn_hash = provider.get_or_deploy_contract(
+            'MintableToken',
+            deploy_kwargs={"_totalSupply": 0},
+        )
+
+        with pytest.raises(Exception):
+            mintable_token.transact({'from': accounts[1]}).mint(
+                who=accounts[0],
+                value=12345,
+            )
+
+
+And you can run the tests with the ``py.test`` command.
+
+.. code-block:: bash
+
+    $ py.test tests/
+    ========================= test session starts ========================
+    platform darwin -- Python 3.5.2, pytest-3.0.4, py-1.4.31, pluggy-0.4.0
+    rootdir: /Users/piper/sites/scratch/populus-tutorial-3, inifile:
+    plugins: populus-1.5.0
+    collected 2 items
+
+    tests/test_token_minting.py ..
+
+    ======================= 2 passed in 0.74 seconds =====================
+
+
+Fin.
+
+
+.. _ERC20: https://github.com/ethereum/EIPs/issues/20
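
Editor's note: behind the scenes, the reason the package-prefixed imports in this tutorial compile is that populus now feeds ``solc`` an import remapping for every installed package, pointing the package name at its directory under ``./installed_packages`` (see the changes to ``populus/compilation/__init__.py`` and the new helpers in ``populus/utils/dependencies.py`` further down in this patch). The sketch below shows the general shape of those remappings; it is a simplification that only looks one level deep and ignores aliases and nested dependencies.

.. code-block:: python

    import os

    INSTALLED_PACKAGES_DIR = "./installed_packages"

    def naive_import_remappings(installed_packages_dir):
        """Build solc-style ``prefix=path`` remappings, one per installed package."""
        remappings = []
        if not os.path.isdir(installed_packages_dir):
            return remappings
        for package_name in sorted(os.listdir(installed_packages_dir)):
            package_dir = os.path.join(installed_packages_dir, package_name)
            if os.path.isdir(package_dir):
                remappings.append("{0}={1}".format(package_name, package_dir))
        return remappings

    print(naive_import_remappings(INSTALLED_PACKAGES_DIR))
    # e.g. ['example-package-owned=./installed_packages/example-package-owned',
    #       'example-package-standard-token=./installed_packages/example-package-standard-token']
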
diff --git a/docs/tutorial.part-4.rst b/docs/tutorial.part-4.rst
new file mode 100644
index 00000000..77f3b206
--- /dev/null
+++ b/docs/tutorial.part-4.rst
@@ -0,0 +1,159 @@
+Part 4: Publishing a Package
+============================
+
+.. contents:: :local:
+
+
+Introduction
+------------
+
+In the previous tutorial we explored installing packages and using the
+contracts from those packages in our project.
+
+This tutorial will pick up where that one left off. We will be publishing our
+``mintable-standard-token`` package to `The Package Registry`_.
+
+
+Configuring Populus for Publishing
+----------------------------------
+
+In order to publish our package you will need to add some configuration to
+the ``RopstenPackageIndexBackend``, which can be found in the ``populus.json``
+file in the root of the project. It *should* currently look like this.
+
+.. code-block:: javascript
+
+    "RopstenPackageIndexBackend": {
+      "class": "populus.packages.backends.index.PackageIndexBackend",
+      "priority": 40,
+      "settings": {
+        "package_index_address": "0x8011df4830b4f696cd81393997e5371b93338878",
+        "web3-for-install": {
+          "$ref": "web3.InfuraRopsten"
+        }
+      }
+    }
+
+
+We're going to add the key ``web3-for-publish`` to the ``settings`` portion of
+this config. Populus will need to be able to send transactions through the
+configured web3 instance. For the purposes of this tutorial you will need to
+run a ``geth`` node that is connected to the *Ropsten* test network with an
+unlocked account. Modify the config to look like the following, but with your
+address substituted in place of the address
+``0xaffa9e11a8deac514b93169c764aa042b4fe316f`` and the path to your
+``geth.ipc`` file for the running Ropsten instance.
+
+.. code-block:: javascript
+
+    "RopstenPackageIndexBackend": {
+      "class": "populus.packages.backends.index.PackageIndexBackend",
+      "priority": 40,
+      "settings": {
+        "package_index_address": "0x8011df4830b4f696cd81393997e5371b93338878",
+        "web3-for-install": {
+          "$ref": "web3.InfuraRopsten"
+        },
+        "web3-for-publish": {
+          "provider": {
+            "class": "web3.providers.ipc.IPCProvider",
+            "settings": {
+              "ipc_path": "/Users/piper/Library/Ethereum/ropsten/geth.ipc"
+            }
+          },
+          "eth": {
+            "default_account": "0xaffa9e11a8deac514b93169c764aa042b4fe316f"
+          }
+        }
+      }
+    }
+
+
+Configuring your package for publishing
+---------------------------------------
+
+The next thing you'll need to do is rename your package to something other than
+``mintable-standard-token`` as that package name is already registered on the
+package index. The package name is set in the ``ethpm.json`` file located in the
+root of the project.
+
+
+Building the release lockfile
+-----------------------------
+
+To build the package we will use the ``$ populus package build`` command. We
+want to include our ``MintableToken`` contract in the release. Use the
+following command to build the release lockfile.
+
+.. code-block:: bash
+
+    $ populus package build --contract-type MintableToken
+    Wrote release lock file: build/1.0.0.json
+
+If you open up the built release lockfile ``./build/1.0.0.json`` you should see something similar to the following (which was truncated for readability's sake).
+
+.. 
code-block:: javascript + + { + "build_dependencies": { + "example-package-owned": "ipfs://QmUwVUMVtkVctrLDeL12SoeCPUacELBU8nAxRtHUzvtjND", + "example-package-standard-token": "ipfs://QmegJYswSDXUJbKWBuTj7AGBY15XceKxnF1o1Vo2VvVPLQ" + }, + "contract_types": { + "MintableToken": { + "abi": [ + .. + ], + "bytecode": "0x60606040525b60....", + "contract_name": "MintableToken", + "natspec": { + "methods": { + "balanceOf(address)": { + "details": "Returns number of tokens owned by given address.", + "params": { + "_owner": "Address of token owner." + } + }, + ... + } + }, + "runtime_bytecode": "0x606060405236156..." + } + }, + "lockfile_version": "1", + "meta": { + "authors": [ + "Piper Merriam " + ], + "description": "Mintable ERC20 token contract", + "keywords": [ + "ERC20", + "tokens" + ], + "license": "MIT", + "links": {} + }, + "package_name": "mintable-standard-token", + "sources": { + "./contracts/MintableToken.sol": "ipfs://QmWUWwXdR6d5BycZYoDVyv4gkEEYkv9ixwQpLoePLNGPBE" + }, + "version": "1.0.0" + } + + +Publishing the release lockfile +------------------------------- + +The last step is to publish the release lockfile. This is done with the ``$ +populus package publish`` command. + +.. code-block:: bash + + $ populus package publish build/1.0.0.json + Publishing to RopstenPackageIndexBackend + + +If you wait for the transaction to be confirmed and head over to `The Package +Registry`_ you should see your newly published package in the package index. + +.. _The Package Registry: http://www.ethpm.com/ diff --git a/docs/tutorial.rst b/docs/tutorial.rst index 88d15fc7..37c7fc74 100644 --- a/docs/tutorial.rst +++ b/docs/tutorial.rst @@ -12,3 +12,5 @@ Contents tutorial.part-1 tutorial.part-2 + tutorial.part-3 + tutorial.part-4 diff --git a/populus/assets/config.v3.schema.json b/populus/assets/config.v3.schema.json index 50a2bb17..1c2ff52e 100644 --- a/populus/assets/config.v3.schema.json +++ b/populus/assets/config.v3.schema.json @@ -17,6 +17,13 @@ "type": "string", "enum": ["3"] }, + "web3": { + "$ref": "#/definitions/ProjectWeb3Config" + }, + "version": { + "type": "string", + "enum": ["3"] + }, "web3": { "$ref": "#/definitions/ProjectWeb3Config" } @@ -39,7 +46,7 @@ "ChainConfig": { "title": "Configuration for a single project chain", "type": "object", - "required": ["chain", "web3"], + "required": ["chain", "web3", "contracts"], "properties": { "chain": {"$ref": "#/definitions/ChainClassConfig"}, "web3": { @@ -47,6 +54,23 @@ {"$ref": "#/definitions/Reference"}, {"$ref": "#/definitions/Web3Config"} ] + }, + "contracts": { + "title": "Configuration for the contract backends this chain will use", + "type": "object", + "required": ["backends"], + "properties": { + "backends": { + "patternProperties": { + "^[a-zA-Z0-9][-_a-zA-Z0-9]*$": { + "anyOf": [ + {"$ref": "#/definitions/Reference"}, + {"$ref": "#/definitions/ContractBackendConfig"} + ] + } + } + } + } } } }, diff --git a/populus/assets/config.v4.schema.json b/populus/assets/config.v4.schema.json index f335ea01..5ceb1b14 100644 --- a/populus/assets/config.v4.schema.json +++ b/populus/assets/config.v4.schema.json @@ -13,6 +13,9 @@ "contracts": { "$ref": "#/definitions/ContractsConfig" }, + "packaging": { + "$ref": "#/definitions/PackagingConfig" + }, "version": { "type": "string", "enum": ["4"] @@ -47,6 +50,23 @@ {"$ref": "#/definitions/Reference"}, {"$ref": "#/definitions/Web3Config"} ] + }, + "contracts": { + "title": "Configuration for the contract backends this chain will use", + "type": "object", + "required": ["backends"], + 
"properties": { + "backends": { + "patternProperties": { + "^[a-zA-Z0-9][-_a-zA-Z0-9]*$": { + "anyOf": [ + {"$ref": "#/definitions/Reference"}, + {"$ref": "#/definitions/ContractBackendConfig"} + ] + } + } + } + } } } }, @@ -86,12 +106,10 @@ "title": "Configuration for a compiler backend", "type": "object", "properties": { - "class": { - "$ref": "#/definitions/PythonImportPath" - }, "settings": { - "title": "Configuration key-value pairs to be passed into the compiler backend", - "type": "object" + "anyOf": [ + {"$ref": "#/definitions/SolcCompilerSettings"} + ] } } }, @@ -132,6 +150,38 @@ "type": "string", "pattern": "^0x[0-9a-fA-F]{40}$" }, + "PackageBackendConfig": { + "title": "Configuration for a package backend", + "type": "object", + "properties": { + "class": { + "$ref": "#/definitions/PythonImportPath" + }, + "priority": { + "title": "The priority of this backend", + "type": "integer" + }, + "settings": { + "title": "Backend specific settings", + "type": "object" + } + } + }, + "PackagingConfig": { + "title": "Configuration for package installation and management", + "type": "object", + "properties": { + "backends": { + "title": "Global configurations for project backends which can be used by project chains", + "additionalProperties": false, + "patternProperties": { + "^[a-zA-Z0-9][-_a-zA-Z0-9]*$": { + "$ref": "#/definitions/PackageBackendConfig" + } + } + } + } + }, "ProjectChainsConfig": { "title": "The configuration for each chain declared by this project", "type": "object", diff --git a/populus/assets/defaults.v4.config.json b/populus/assets/defaults.v4.config.json index 8ce1f7fa..6141704d 100644 --- a/populus/assets/defaults.v4.config.json +++ b/populus/assets/defaults.v4.config.json @@ -10,6 +10,9 @@ }, "contracts": { "backends": { + "InstalledPackages": { + "$ref": "contracts.backends.InstalledPackages" + }, "JSONFile": { "$ref": "contracts.backends.JSONFile" }, @@ -34,6 +37,9 @@ }, "contracts": { "backends": { + "InstalledPackages": { + "$ref": "contracts.backends.InstalledPackages" + }, "JSONFile": { "$ref": "contracts.backends.JSONFile" }, @@ -58,6 +64,9 @@ }, "contracts": { "backends": { + "InstalledPackages": { + "$ref": "contracts.backends.InstalledPackages" + }, "Memory": { "$ref": "contracts.backends.Memory" }, @@ -79,6 +88,9 @@ }, "contracts": { "backends": { + "InstalledPackages": { + "$ref": "contracts.backends.InstalledPackages" + }, "Memory": { "$ref": "contracts.backends.Memory" }, @@ -100,6 +112,9 @@ }, "contracts": { "backends": { + "InstalledPackages": { + "$ref": "contracts.backends.InstalledPackages" + }, "Memory": { "$ref": "contracts.backends.Memory" }, @@ -126,6 +141,10 @@ "class": "populus.contracts.backends.project.ProjectContractsBackend", "priority": 20 }, + "InstalledPackages": { + "class": "populus.contracts.backends.installed_packages.InstalledPackagesBackend", + "priority": 30 + }, "TestContracts": { "class": "populus.contracts.backends.testing.TestContractsBackend", "priority": 40 @@ -152,6 +171,36 @@ "contracts_source_dir": "./contracts", "import_remappings": [] }, + "packaging": { + "backends": { + "IPFSBackend": { + "class": "populus.packages.backends.ipfs.IPFSPackageBackend", + "priority": 30, + "settings": { + "host": "https://ipfs.infura.io", + "port": 5001 + } + }, + "LocalFilesystemLockfileBackend": { + "class": "populus.packages.backends.lockfile.LocalFilesystemLockfileBackend", + "priority": 20 + }, + "LocalManifestBackend": { + "class": "populus.packages.backends.manifest.LocalManifestBackend", + "priority": 10 + }, + 
"RopstenPackageIndexBackend": { + "class": "populus.packages.backends.index.PackageIndexBackend", + "priority": 40, + "settings": { + "web3-for-install": { + "$ref": "web3.InfuraRopsten" + }, + "package_index_address": "0x8011df4830b4f696cd81393997e5371b93338878" + } + } + } + }, "web3": { "GethIPC": { "provider": { diff --git a/populus/assets/file.proto b/populus/assets/file.proto new file mode 100644 index 00000000..5e91d7e4 --- /dev/null +++ b/populus/assets/file.proto @@ -0,0 +1,16 @@ +syntax = "proto2"; + +message Data { + enum DataType { + Raw = 0; + Directory = 1; + File = 2; + Metadata = 3; + Symlink = 4; + } + + required DataType Type = 1; + optional bytes Data = 2; + optional uint64 filesize = 3; + repeated uint64 blocksizes = 4; +} diff --git a/populus/assets/node.proto b/populus/assets/node.proto new file mode 100644 index 00000000..4ec6ac64 --- /dev/null +++ b/populus/assets/node.proto @@ -0,0 +1,27 @@ +syntax = "proto2"; + +import "code.google.com/p/gogoprotobuf/gogoproto/gogo.proto"; + +option (gogoproto.gostring_all) = true; +option (gogoproto.equal_all) = true; +option (gogoproto.verbose_equal_all) = true; +option (gogoproto.goproto_stringer_all) = false; +option (gogoproto.stringer_all) = true; +option (gogoproto.populate_all) = true; +option (gogoproto.testgen_all) = true; +option (gogoproto.benchgen_all) = true; +option (gogoproto.marshaler_all) = true; +option (gogoproto.sizer_all) = true; +option (gogoproto.unmarshaler_all) = true; + +message PBLink { + optional bytes Hash = 1; + optional string Name = 2; + optional uint64 Tsize = 3; +} + +message PBNode { + repeated PBLink Links = 2; + + optional bytes Data = 1; +} diff --git a/populus/assets/package_index_abi.json b/populus/assets/package_index_abi.json new file mode 100644 index 00000000..dcdda37a --- /dev/null +++ b/populus/assets/package_index_abi.json @@ -0,0 +1,622 @@ +[ + { + "constant": true, + "inputs": [], + "name": "getNumReleases", + "outputs": [ + { + "name": "", + "type": "uint256" + } + ], + "payable": false, + "type": "function" + }, + { + "constant": false, + "inputs": [ + { + "name": "newReleaseValidator", + "type": "address" + } + ], + "name": "setReleaseValidator", + "outputs": [ + { + "name": "", + "type": "bool" + } + ], + "payable": false, + "type": "function" + }, + { + "constant": true, + "inputs": [ + { + "name": "name", + "type": "string" + }, + { + "name": "offset", + "type": "uint256" + }, + { + "name": "numReleases", + "type": "uint256" + } + ], + "name": "getPackageReleaseHashes", + "outputs": [ + { + "name": "", + "type": "bytes32[]" + } + ], + "payable": false, + "type": "function" + }, + { + "constant": false, + "inputs": [ + { + "name": "newOwner", + "type": "address" + } + ], + "name": "setOwner", + "outputs": [ + { + "name": "", + "type": "bool" + } + ], + "payable": false, + "type": "function" + }, + { + "constant": true, + "inputs": [ + { + "name": "name", + "type": "string" + }, + { + "name": "major", + "type": "uint32" + }, + { + "name": "minor", + "type": "uint32" + }, + { + "name": "patch", + "type": "uint32" + }, + { + "name": "preRelease", + "type": "string" + }, + { + "name": "build", + "type": "string" + } + ], + "name": "getReleaseLockfileURI", + "outputs": [ + { + "name": "", + "type": "string" + } + ], + "payable": false, + "type": "function" + }, + { + "constant": true, + "inputs": [], + "name": "getPackageDb", + "outputs": [ + { + "name": "", + "type": "address" + } + ], + "payable": false, + "type": "function" + }, + { + "constant": false, + "inputs": [ + { + 
"name": "newPackageDb", + "type": "address" + } + ], + "name": "setPackageDb", + "outputs": [ + { + "name": "", + "type": "bool" + } + ], + "payable": false, + "type": "function" + }, + { + "constant": true, + "inputs": [ + { + "name": "name", + "type": "string" + }, + { + "name": "major", + "type": "uint32" + }, + { + "name": "minor", + "type": "uint32" + }, + { + "name": "patch", + "type": "uint32" + }, + { + "name": "preRelease", + "type": "string" + }, + { + "name": "build", + "type": "string" + } + ], + "name": "releaseExists", + "outputs": [ + { + "name": "", + "type": "bool" + } + ], + "payable": false, + "type": "function" + }, + { + "constant": true, + "inputs": [], + "name": "getReleaseValidator", + "outputs": [ + { + "name": "", + "type": "address" + } + ], + "payable": false, + "type": "function" + }, + { + "constant": true, + "inputs": [ + { + "name": "releaseHash", + "type": "bytes32" + } + ], + "name": "getReleaseData", + "outputs": [ + { + "name": "major", + "type": "uint32" + }, + { + "name": "minor", + "type": "uint32" + }, + { + "name": "patch", + "type": "uint32" + }, + { + "name": "preRelease", + "type": "string" + }, + { + "name": "build", + "type": "string" + }, + { + "name": "releaseLockfileURI", + "type": "string" + }, + { + "name": "createdAt", + "type": "uint256" + }, + { + "name": "updatedAt", + "type": "uint256" + } + ], + "payable": false, + "type": "function" + }, + { + "constant": true, + "inputs": [], + "name": "getAllReleaseHashes", + "outputs": [ + { + "name": "", + "type": "bytes32[]" + } + ], + "payable": false, + "type": "function" + }, + { + "constant": false, + "inputs": [ + { + "name": "name", + "type": "string" + }, + { + "name": "newPackageOwner", + "type": "address" + } + ], + "name": "transferPackageOwner", + "outputs": [ + { + "name": "", + "type": "bool" + } + ], + "payable": false, + "type": "function" + }, + { + "constant": true, + "inputs": [ + { + "name": "idx", + "type": "uint256" + } + ], + "name": "getReleaseHash", + "outputs": [ + { + "name": "", + "type": "bytes32" + } + ], + "payable": false, + "type": "function" + }, + { + "constant": false, + "inputs": [ + { + "name": "name", + "type": "string" + }, + { + "name": "major", + "type": "uint32" + }, + { + "name": "minor", + "type": "uint32" + }, + { + "name": "patch", + "type": "uint32" + }, + { + "name": "preRelease", + "type": "string" + }, + { + "name": "build", + "type": "string" + }, + { + "name": "releaseLockfileURI", + "type": "string" + } + ], + "name": "release", + "outputs": [ + { + "name": "", + "type": "bool" + } + ], + "payable": false, + "type": "function" + }, + { + "constant": true, + "inputs": [], + "name": "getNumPackages", + "outputs": [ + { + "name": "", + "type": "uint256" + } + ], + "payable": false, + "type": "function" + }, + { + "constant": true, + "inputs": [ + { + "name": "offset", + "type": "uint256" + }, + { + "name": "numReleases", + "type": "uint256" + } + ], + "name": "getReleaseHashes", + "outputs": [ + { + "name": "", + "type": "bytes32[]" + } + ], + "payable": false, + "type": "function" + }, + { + "constant": false, + "inputs": [ + { + "name": "newAuthority", + "type": "address" + } + ], + "name": "setAuthority", + "outputs": [ + { + "name": "", + "type": "bool" + } + ], + "payable": false, + "type": "function" + }, + { + "constant": true, + "inputs": [ + { + "name": "name", + "type": "string" + } + ], + "name": "packageExists", + "outputs": [ + { + "name": "", + "type": "bool" + } + ], + "payable": false, + "type": "function" + }, + { + "constant": 
true, + "inputs": [], + "name": "owner", + "outputs": [ + { + "name": "", + "type": "address" + } + ], + "payable": false, + "type": "function" + }, + { + "constant": true, + "inputs": [], + "name": "authority", + "outputs": [ + { + "name": "", + "type": "address" + } + ], + "payable": false, + "type": "function" + }, + { + "constant": true, + "inputs": [ + { + "name": "name", + "type": "string" + } + ], + "name": "getPackageData", + "outputs": [ + { + "name": "packageOwner", + "type": "address" + }, + { + "name": "createdAt", + "type": "uint256" + }, + { + "name": "numReleases", + "type": "uint256" + }, + { + "name": "updatedAt", + "type": "uint256" + } + ], + "payable": false, + "type": "function" + }, + { + "constant": true, + "inputs": [ + { + "name": "name", + "type": "string" + } + ], + "name": "getAllPackageReleaseHashes", + "outputs": [ + { + "name": "", + "type": "bytes32[]" + } + ], + "payable": false, + "type": "function" + }, + { + "constant": true, + "inputs": [ + { + "name": "name", + "type": "string" + }, + { + "name": "releaseIdx", + "type": "uint256" + } + ], + "name": "getReleaseHashForPackage", + "outputs": [ + { + "name": "", + "type": "bytes32" + } + ], + "payable": false, + "type": "function" + }, + { + "constant": true, + "inputs": [ + { + "name": "idx", + "type": "uint256" + } + ], + "name": "getPackageName", + "outputs": [ + { + "name": "", + "type": "string" + } + ], + "payable": false, + "type": "function" + }, + { + "constant": false, + "inputs": [ + { + "name": "newReleaseDb", + "type": "address" + } + ], + "name": "setReleaseDb", + "outputs": [ + { + "name": "", + "type": "bool" + } + ], + "payable": false, + "type": "function" + }, + { + "constant": true, + "inputs": [], + "name": "getReleaseDb", + "outputs": [ + { + "name": "", + "type": "address" + } + ], + "payable": false, + "type": "function" + }, + { + "anonymous": false, + "inputs": [ + { + "indexed": true, + "name": "nameHash", + "type": "bytes32" + }, + { + "indexed": true, + "name": "releaseHash", + "type": "bytes32" + } + ], + "name": "PackageRelease", + "type": "event" + }, + { + "anonymous": false, + "inputs": [ + { + "indexed": true, + "name": "oldOwner", + "type": "address" + }, + { + "indexed": true, + "name": "newOwner", + "type": "address" + } + ], + "name": "PackageTransfer", + "type": "event" + }, + { + "anonymous": false, + "inputs": [ + { + "indexed": true, + "name": "oldOwner", + "type": "address" + }, + { + "indexed": true, + "name": "newOwner", + "type": "address" + } + ], + "name": "OwnerUpdate", + "type": "event" + }, + { + "anonymous": false, + "inputs": [ + { + "indexed": true, + "name": "oldAuthority", + "type": "address" + }, + { + "indexed": true, + "name": "newAuthority", + "type": "address" + } + ], + "name": "AuthorityUpdate", + "type": "event" + } +] diff --git a/populus/assets/release-lockfile-v1.schema.json b/populus/assets/release-lockfile-v1.schema.json new file mode 100644 index 00000000..4318aab2 --- /dev/null +++ b/populus/assets/release-lockfile-v1.schema.json @@ -0,0 +1,279 @@ +{ + "title": "Release Lock File Specification", + "type": "object", + "required": [ + "lockfile_version", + "package_name", + "version" + ], + "version": "1", + "additionalProperties": false, + "properties": { + "lockfile_version": { + "type": "string", + "title": "Lock File Version", + "default": "1", + "enum": ["1"] + }, + "package_name": { + "title": "The name of the package that this release is for", + "type": "string", + "pattern": "^[a-z][-a-z0-9]{0,213}$" + }, + "meta": { + "$ref": 
"#/definitions/PackageMeta" + }, + "version": { + "title": "Version", + "type": "string" + }, + "sources": { + "title": "Sources", + "type": "object", + "patternProperties": { + "\\.\\/.*": { + "anyOf": [ + { + "title": "Source code", + "type": "string" + }, + { + "$ref": "IPFS-URI" + } + ] + } + } + }, + "contract_types": { + "title": "The contract types included in this release", + "type": "object", + "patternProperties": { + "[a-zA-Z][-a-zA-Z0-9_]*(?:\\[[-a-zA-Z0-9]{1,256}\\])$": { + "$ref": "#/definitions/ContractType" + } + } + }, + "deployments": { + "title": "The deployed contract instances in this release", + "type": "object", + "patternProperties": { + "^blockchain\\://[0-9a-zA-Z]{64}/block/[0-9a-zA-Z]{64}$": { + "type": "object", + "patternProperties": { + "^[a-zA-Z][a-zA-Z0-9_]*$": { + "$ref": "#/definitions/ContractInstance" + } + } + } + } + }, + "build_dependencies": { + "title": "Build Dependencies", + "type": "object", + "patternProperties": { + "^[a-z][-a-z0-9]{0,213}$": { + "$ref": "#/definitions/IPFS-URI" + } + } + } + }, + "definitions": { + "PackageMeta": { + "title": "Metadata about the package", + "type": "object", + "properties": { + "authors": { + "title": "Package authors", + "type": "array", + "items": { + "type": "string" + } + }, + "license": { + "title": "The license that this package and it's source are released under", + "type": "string" + }, + "description": { + "title": "Description of this package", + "type": "string" + }, + "keywords": { + "title": "Keywords that apply to this package", + "type": "array", + "items": { + "type": "string" + } + }, + "links": { + "title": "URIs for resources related to this package", + "type": "object", + "additionalProperties": { + "type": "string", + "format": "URI" + } + } + } + }, + "ContractType": { + "title": "Data for a contract type included in this package", + "type": "object", + "properties":{ + "contract_name": { + "title": "The name for this contract type as found in the project source code.", + "type": "string", + "pattern": "[a-zA-Z][a-zA-Z0-9_]*" + }, + "bytecode": { + "title": "The unlinked '0x' prefixed bytecode for this contract type", + "type": "string" + }, + "runtime_bytecode": { + "title": "The unlinked '0x' prefixed runtime portion of the bytecode for this contract type", + "type": "string" + }, + "abi": { + "title": "The ABI for this contract type", + "type": "array" + }, + "natspec": { + "title": "The combined user-doc and dev-doc for this contract", + "type": "object" + }, + "compiler": { + "$ref": "#/definitions/CompilerInformation" + } + } + }, + "ContractInstance": { + "title": "Data for a deployed instance of a contract", + "type": "object", + "required": [ + "contract_type" + ], + "properties": { + "contract_type": { + "title": "The contract type of this contract instance", + "type": "string", + "pattern": "^(?:[a-z][-a-z0-9]{0,213}\\:)?[a-zA-Z][-a-zA-Z0-9_]*(?:\\[[-a-zA-Z0-9]{1,256}\\])?$" + }, + "address": { + "$ref": "#/definitions/Address" + }, + "transaction": { + "$ref": "#/definitions/TransactionHash" + }, + "block": { + "$ref": "#/definitions/BlockHash" + }, + "runtime_bytecode": { + "title": "The on-chain bytecode for this contract instance.", + "type": "string" + }, + "compiler": { + "$ref": "#/definitions/CompilerInformation" + }, + "link_dependencies": { + "title": "The values for the link references found within this contract instances runtime bytecode", + "type": "array", + "items": { + "$ref": "#/definitions/LinkValue" + } + } + } + }, + "LinkValue": { + "title": "A value for an 
individual link reference in a contract's bytecode", + "type": "object", + "required": [ + "offset", + "value" + ], + "properties": { + "offset": { + "type": "integer", + "minimum": 0 + }, + "value": { + "title": "The value for the link reference", + "type": "string", + "anyOf": [ + {"$ref": "#/definitions/Address"}, + {"$ref": "#/definitions/ContractInstanceName"}, + {"$ref": "#/definitions/PackageContractInstanceName"} + ] + } + } + }, + "ContractInstanceName": { + "title": "The name of the deployed contract instance", + "type": "string", + "pattern": "^[a-zA-Z][a-zA-Z0-9_]*$" + }, + "PackageContractInstanceName": { + "title": "The name of the deployed contract instance in a package", + "type": "string", + "pattern": "^([a-z][-a-z0-9]{0,213}\\:)+[a-zA-Z][a-zA-Z0-9_]*$" + }, + "CompilerInformation": { + "title": "Information about the software that was used to compile a contract type or instance", + "type": "object", + "required": [ + "type", + "version" + ], + "properties": { + "type": { + "title": "The name of the compiler", + "enum": [ + "solc", + "solcjs" + ] + }, + "version": { + "title": "The version string for the compiler", + "type": "string" + }, + "settings": { + "title": "The settings used for compilation", + "anyOf": [ + {"$ref": "solc_Settings"} + ] + } + } + }, + "solc_Settings": { + "title": "Settings for use with the solc or solcjs compiler", + "type": "object", + "properties": { + "optimize": { + "type": "boolean" + }, + "optimize_runs": { + "type": "integer", + "minimum": 1 + } + } + }, + "Address": { + "title": "An Ethereum address", + "type": "string", + "pattern": "^0x[0-9a-fA-F]{40}$" + }, + "TransactionHash": { + "title": "An Ethereum transaction hash", + "type": "string", + "pattern": "^0x[0-9a-zA-Z]{64}$" + }, + "BlockHash": { + "title": "An Ethereum block hash", + "type": "string", + "pattern": "^0x[0-9a-zA-Z]{64}$" + }, + "IPFS-URI": { + "title": "An IPFS URI", + "type": "string", + "format": "uri", + "pattern": "^ipfs:/?/?.*$" + } + } +} diff --git a/populus/chain/__init__.py b/populus/chain/__init__.py index a0294d8f..c3a235d1 100644 --- a/populus/chain/__init__.py +++ b/populus/chain/__init__.py @@ -1,9 +1,3 @@ -from populus.contracts.exceptions import ( # noqa: F401 - NoKnownAddress, - UnknownContract, - BytecodeMismatch, -) - from .geth import ( # noqa: F401 BaseGethChain, LocalGethChain, diff --git a/populus/cli/__init__.py b/populus/cli/__init__.py index 483491be..8d4dcb7a 100644 --- a/populus/cli/__init__.py +++ b/populus/cli/__init__.py @@ -5,3 +5,4 @@ from .config_cmd import config_cmd # NOQA from .deploy_cmd import deploy_cmd # NOQA from .init_cmd import init_cmd # NOQA +from .package_cmd import package_cmd # NOQA diff --git a/populus/cli/chain_cmd.py b/populus/cli/chain_cmd.py index 6146a5bc..eaac08fb 100644 --- a/populus/cli/chain_cmd.py +++ b/populus/cli/chain_cmd.py @@ -2,7 +2,7 @@ import click -from populus.chain import ( +from populus.chain.geth import ( BaseGethChain, ) diff --git a/populus/cli/package_cmd.py b/populus/cli/package_cmd.py new file mode 100644 index 00000000..9d367b9d --- /dev/null +++ b/populus/cli/package_cmd.py @@ -0,0 +1,317 @@ +import click +import json +import logging +import os + +from eth_utils import ( + compose, + to_dict, +) + +from populus.packages.build import ( + persist_package_file, + construct_release_lockfile, +) +from populus.packages.installation import ( + install_packages_to_project, + update_project_dependencies, +) + +from populus.utils.filesystem import ( + ensure_path_exists, +) +from 
populus.utils.dependencies import ( + get_lockfile_build_path, +) +from populus.utils.packaging import ( + SUPPORTED_PACKAGE_MANIFEST_VERSIONS, + validate_package_manifest, + validate_release_lockfile, + get_publishable_backends, + load_release_lockfile, + write_release_lockfile, +) + +from .main import main + + +@main.group('package') +@click.pass_context +def package_cmd(ctx): + """ + Package management commands. + """ + pass + + +def split_on_commas(values): + return [value.strip() for value in values.split(',') if value] + + +@to_dict +def split_on_colons(values): + for kv in values: + key, _, value = kv.partition(':') + yield key, value + + +@package_cmd.command('init') +@click.pass_context +def package_init(ctx): + """ + Initialize the `ethpm.json` file. + """ + logger = logging.getLogger('populus.cli.init') + project = ctx.obj['PROJECT'] + + if project.has_package_manifest: + overwrite_msg = ( + "An `ethpm.json` file is already present. If you proceed your choices " + "will overwrite any existing values" + ) + if not click.confirm(overwrite_msg, default=False): + ctx.exit(1) + package_manifest = project.package_manifest + else: + package_manifest = {} + + package_manifest.setdefault('manifest_version', '1') + + if package_manifest['manifest_version'] not in SUPPORTED_PACKAGE_MANIFEST_VERSIONS: + raise ValueError( + "Unsupported manifest version. Supported versions are {0}".format( + ", ".join( + version + for version + in sorted(SUPPORTED_PACKAGE_MANIFEST_VERSIONS) + ) + ) + ) + + if project.has_package_manifest: + logger.info("Updating existing ethpm.json file.") + else: + logger.info("Writing new ethpm.json file.") + + # TODO: pull from git configuration if present. + package_manifest['package_name'] = click.prompt( + 'Package Name', + default=package_manifest.get('package_name'), + ) + + # TODO: pull default email from git configuration. + package_manifest['authors'] = click.prompt( + 'Author(s)', + value_proc=split_on_commas, + default=package_manifest.get('authors', []), + ) + + package_manifest['version'] = click.prompt( + 'Version', + default=package_manifest.get('version', '1.0.0'), + ) + + # TODO: auto detect this from a LICENSE file if present. + package_manifest['license'] = click.prompt( + 'License', + default=package_manifest.get('license', 'MIT'), + ) + + package_manifest['description'] = click.prompt( + 'Description', + default=package_manifest.get('description', ''), + ) + + package_manifest['keywords'] = click.prompt( + 'Keywords', + value_proc=split_on_commas, + default=package_manifest.get('keywords', []), + ) + + package_manifest['links'] = click.prompt( + 'Links', + value_proc=compose(split_on_commas, split_on_colons), + default=package_manifest.get('links', {}), + ) + + with open(project.package_manifest_path, 'w') as package_manifest_file: + json.dump(package_manifest, package_manifest_file, sort_keys=True, indent=2) + + logger.info("Wrote package manifest: %s", project.package_manifest_path) + + +@package_cmd.command('install') +@click.argument('package_identifiers', nargs=-1) +@click.option('--save/--no-save', default=True, help="Save package into manifest dependencies") +@click.pass_context +def package_install(ctx, package_identifiers, save): + """ + Install package(s). + + 1. Load package manifest. + + TODO: figure out what the right steps are for this. Should probably be a + multi-phase thing which first resolves all of the identifiers, then + resolves all dependencies for each identifier, then does the actual + installation. 
+ """ + logger = logging.getLogger('populus.cli.install') + project = ctx.obj['PROJECT'] + + if not package_identifiers: + package_identifiers = ('.',) + + installed_dependencies = install_packages_to_project( + project.installed_packages_dir, + package_identifiers, + project.package_backends, + ) + logger.info("Installed Packages: {0}".format(', '.join(( + package_data['meta']['package_name'] for package_data in installed_dependencies + )))) + + if save: + update_project_dependencies(project, installed_dependencies) + + +@package_cmd.command('build') +@click.option( + 'chain_names', + '--chain', + '-c', + multiple=True, + help=( + "Specifies which chains should be included in the deployments section " + "of the release." + ), +) +@click.option( + 'contract_instance_names', + '--contract-instance', + '-d', + multiple=True, + help=( + "Specifies the deployed contract instances to include in the release." + ), +) +@click.option( + 'contract_type_names', + '--contract-type', + '-t', + multiple=True, + help=( + "Specifies the contract types to include in the release" + ), +) +@click.option( + '--overwrite/--no-overwrite', + default=False, + help=( + "Specifies if this should overwrite any existing release lockfile" + ), +) +@click.option('--wait-for-sync/--no-wait-for-sync', default=True) +@click.pass_context +def package_build(ctx, + chain_names, + contract_instance_names, + contract_type_names, + overwrite, + wait_for_sync): + """ + Create a release. + """ + logger = logging.getLogger('populus.cli.build') + project = ctx.obj['PROJECT'] + + if not project.has_package_manifest: + logger.error("No package manifest found in project.") + ctx.exit(1) + + package_manifest = project.package_manifest + validate_package_manifest(package_manifest) + + version = package_manifest['version'] + + release_lockfile_path = get_lockfile_build_path( + project.build_asset_dir, + version, + ) + + if not overwrite and os.path.exists(release_lockfile_path): + cannot_overwrite_msg = ( + "Found an existing release lockfile for {version} at " + "{release_lockfile_path}. Run command again with --overwrite to " + "overwrite this file.".format( + version=version, + release_lockfile_path=release_lockfile_path, + ) + ) + logger.error(cannot_overwrite_msg) + ctx.exit(1) + + if chain_names and not contract_instance_names: + logger.error("Must specify which contracts you want to include in the deployments") + ctx.exit(1) + + release_lockfile = construct_release_lockfile( + project=project, + chain_names=chain_names, + contract_instance_names=contract_instance_names, + contract_type_names=contract_type_names, + ) + + validate_release_lockfile(release_lockfile) + + ensure_path_exists(project.build_asset_dir) + + write_release_lockfile(release_lockfile, release_lockfile_path) + + logger.info("Wrote release lock file: {0}".format(release_lockfile_path)) + + +@package_cmd.command('publish') +@click.argument( + 'release_lockfile_path', + type=click.Path( + exists=True, + file_okay=True, + dir_okay=False, + ), + nargs=1, +) +@click.option('--wait-for-sync/--no-wait-for-sync', default=True) +@click.pass_context +def package_publish(ctx, release_lockfile_path, wait_for_sync): + """ + Create a release. 
+ """ + logger = logging.getLogger('populus.cli.publish') + project = ctx.obj['PROJECT'] + + if release_lockfile_path is None: + # TODO: select from `./build` dir + raise NotImplementedError("Not implemented") + + release_lockfile = load_release_lockfile(release_lockfile_path) + + validate_release_lockfile(release_lockfile) + + with project.get_chain('ropsten'): + package_backends = project.package_backends + + release_lockfile_uri = persist_package_file(release_lockfile_path, package_backends) + publishable_backends = get_publishable_backends( + release_lockfile, + release_lockfile_uri, + package_backends, + ) + + if not publishable_backends: + raise ValueError("TODO: handle this gracefully") + elif len(publishable_backends) > 1: + raise ValueError("TODO: handle this gracefully") + else: + backend_name, backend = tuple(publishable_backends.items())[0] + logger.info("Publishing to {0}".format(backend_name)) + backend.publish_release_lockfile(release_lockfile, release_lockfile_uri) diff --git a/populus/compilation/__init__.py b/populus/compilation/__init__.py index dedf15f1..a51b47a2 100644 --- a/populus/compilation/__init__.py +++ b/populus/compilation/__init__.py @@ -5,10 +5,11 @@ import os from populus.utils.compile import ( - get_project_source_paths, - get_test_source_paths, validate_compiled_contracts, post_process_compiled_contracts, + compute_project_compilation_arguments, + compute_test_compilation_arguments, + compute_installed_packages_compilation_arguments, ) from populus.utils.functional import ( get_duplicates, @@ -25,29 +26,56 @@ def _get_contract_key(contract_data): def compile_project_contracts(project): logger = logging.getLogger('populus.compilation.compile_project_contracts') - project_contract_source_paths = get_project_source_paths(project.contracts_source_dir) + project_source_paths, project_import_remappings = compute_project_compilation_arguments( + project.contracts_source_dir, + project.installed_packages_dir, + ) logger.debug( "Found %s project source files: %s", - len(project_contract_source_paths), - ", ".join(project_contract_source_paths), + len(project_source_paths), + project_source_paths, + ) + test_source_paths, test_import_remappings = compute_test_compilation_arguments( + project.tests_dir, + project.installed_packages_dir, ) - - test_contract_source_paths = get_test_source_paths(project.tests_dir) logger.debug( "Found %s test source files: %s", - len(test_contract_source_paths), - ", ".join(test_contract_source_paths), + len(test_source_paths), + test_source_paths, + ) + installed_packages_compilation_arguments = ( + compute_installed_packages_compilation_arguments(project.installed_packages_dir) + ) + if installed_packages_compilation_arguments: + installed_packages_source_paths, installed_packages_import_remappings = ( + installed_packages_compilation_arguments + ) + else: + installed_packages_source_paths = tuple() + installed_packages_import_remappings = tuple() + logger.debug( + "Found %s dependency source files: %s", + len(installed_packages_source_paths), + installed_packages_source_paths, ) all_source_paths = tuple(itertools.chain( - project_contract_source_paths, - test_contract_source_paths, + project_source_paths, + test_source_paths, + *installed_packages_source_paths + )) + all_import_remappings = tuple(itertools.chain( + project_import_remappings, + test_import_remappings, + project.config.get('compilation.import_remappings', []), + *installed_packages_import_remappings )) compiler_backend = project.get_compiler_backend() 
base_compiled_contracts = compiler_backend.get_compiled_contracts( source_file_paths=all_source_paths, - import_remappings=project.config.get('compilation.import_remappings'), + import_remappings=all_import_remappings, ) compiled_contracts = post_process_compiled_contracts(base_compiled_contracts) validate_compiled_contracts(compiled_contracts) diff --git a/populus/config/upgrade/v3.py b/populus/config/upgrade/v3.py index 34e01f13..3e6649be 100644 --- a/populus/config/upgrade/v3.py +++ b/populus/config/upgrade/v3.py @@ -31,9 +31,16 @@ NEW_V4_PATHS = { - 'compilation.backends.SolcCombinedJSON', + 'chains.mainnet.contracts.backends.InstalledPackages', + 'chains.ropsten.contracts.backends.InstalledPackages', + 'chains.temp.contracts.backends.InstalledPackages', + 'chains.tester.contracts.backends.InstalledPackages', + 'chains.testrpc.contracts.backends.InstalledPackages', 'compilation.backend', + 'compilation.backends.SolcCombinedJSON', 'compilation.import_remappings', + 'contracts.backends.InstalledPackages', + 'packaging', } MOVED_V3_PATHS = { diff --git a/populus/contracts/backends/installed_packages.py b/populus/contracts/backends/installed_packages.py new file mode 100644 index 00000000..3596fa64 --- /dev/null +++ b/populus/contracts/backends/installed_packages.py @@ -0,0 +1,139 @@ +from eth_utils import ( + compose, + to_tuple, +) + +from populus.contracts.exceptions import ( + NoKnownAddress, +) + +from populus.utils.chains import ( + check_if_chain_matches_chain_uri, +) +from populus.utils.contracts import ( + is_dependency_contract, + is_dependency_contract_name, + map_contracts_to_source_location, +) +from populus.utils.dependencies import ( + build_dependency_namespace_lookups, + recursive_find_installed_dependency_base_dirs, + get_release_lockfile_path, +) +from populus.utils.functional import ( + cached_property, + to_set, +) +from populus.utils.packaging import ( + load_release_lockfile, +) + +from .base import BaseContractBackend + + +@to_tuple +def get_deployed_contract_instances_from_installed_packages(web3, + installed_packages_dir, + instance_name): + installed_dependency_locations = recursive_find_installed_dependency_base_dirs( + installed_packages_dir, + ) + for package_base_dir in installed_dependency_locations: + release_lockfile_path = get_release_lockfile_path(package_base_dir) + release_lockfile = load_release_lockfile(release_lockfile_path) + + deployments = release_lockfile.get('deployments', {}) + for chain_uri, deployed_contract_instances in deployments.items(): + if not check_if_chain_matches_chain_uri(web3, chain_uri): + continue + if instance_name in deployed_contract_instances: + yield deployed_contract_instances[instance_name] + + +class InstalledPackagesBackend(BaseContractBackend): + """ + A contract backend that only acts as a provider sourcing contracts from + installed packages. 
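+
+    Deployed addresses are looked up in the release lockfiles (`lock.json`)
+    of packages installed under `./installed_packages`, and contract data is
+    sourced from the compiled contracts of those installed packages.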
+ """ + is_registrar = True + is_provider = True + is_store = True + + # + # Registrar API + # + def get_contract_addresses(self, instance_name): + web3 = self.chain.web3 + deployed_instances = get_deployed_contract_instances_from_installed_packages( + web3, + self.chain.project.installed_packages_dir, + instance_name, + ) + if not deployed_instances: + raise NoKnownAddress("No deployed instances of {0} found".format(instance_name)) + return tuple( + deployed_instance['address'] for deployed_instance in deployed_instances + ) + + def set_contract_address(self, *args, **kwargs): + pass + + # + # Provider API + # + def get_contract_identifier(self, contract_name): + if is_dependency_contract_name(contract_name): + return contract_name + contract_name_to_dependency_namespace = compose( + self.dependencies_contract_data_source_path_lookup.__getitem__, + self.dependencies_namespace_lookup.__getitem__, + ) + return ":".join(( + contract_name_to_dependency_namespace(contract_name), + contract_name, + )) + + def get_all_contract_data(self): + namespaced_dependencies_contract_data = { + self.get_contract_identifier(contract_name): contract_data + for contract_name, contract_data + in self.dependencies_contract_data.items() + } + return namespaced_dependencies_contract_data + + @to_set + def get_all_contract_names(self): + for contract_identifier in self.get_all_contract_data().keys(): + _, _, contract_name = contract_identifier.rpartition(':') + yield contract_name + + # + # Private API + # + @cached_property + def all_dependency_base_dirs(self): + return recursive_find_installed_dependency_base_dirs( + self.chain.project.installed_packages_dir, + ) + + @cached_property + def dependencies_contract_data(self): + return { + contract_name: contract_data + for contract_name, contract_data + in self.chain.project.compiled_contract_data.items() + if is_dependency_contract(self.chain.project.installed_packages_dir, contract_data) + } + + @cached_property + def dependencies_contract_data_source_path_lookup(self): + return map_contracts_to_source_location( + self.dependencies_contract_data, + self.all_dependency_base_dirs, + ) + + @cached_property + def dependencies_namespace_lookup(self): + return build_dependency_namespace_lookups( + self.all_dependency_base_dirs, + ) diff --git a/populus/contracts/contract.py b/populus/contracts/contract.py index eb07b898..4248da59 100644 --- a/populus/contracts/contract.py +++ b/populus/contracts/contract.py @@ -11,6 +11,7 @@ from populus.utils.contracts import ( is_project_contract, is_test_contract, + is_dependency_contract, ) from populus.utils.functional import ( to_object, @@ -28,6 +29,10 @@ def build_populus_meta(chain, contract_data): 'is_project_contract', is_project_contract(chain.project.contracts_source_dir, contract_data), ) + yield ( + 'is_dependency_contract', + is_dependency_contract(chain.project.installed_packages_dir, contract_data), + ) yield ( 'is_test_contract', is_test_contract(chain.project.tests_dir, contract_data), diff --git a/populus/packages/__init__.py b/populus/packages/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/populus/packages/backends/__init__.py b/populus/packages/backends/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/populus/packages/backends/base.py b/populus/packages/backends/base.py new file mode 100644 index 00000000..9fca58c1 --- /dev/null +++ b/populus/packages/backends/base.py @@ -0,0 +1,84 @@ +class BasePackageBackend(object): + project = None + settings = None + + def 
__init__(self, project, settings): + self.project = project + self.settings = settings + self.setup_backend() + + def setup_backend(self): + pass + + # + # Read API primarily for package installation + # + def can_translate_package_identifier(self, package_identifier): + """ + Returns `True` or `False` as to whether this backend is capable of + translating this identifier. + """ + return False + + def translate_package_identifier(self, package_identifier): + """ + Returns the translated result of the package identifier. This should + always be returned as an iterable so that *special* identifiers can end + up being translated to mean (install multiple packages). + + Translation is the process of taking a package identifier and + converting it into another more basic format. The translation of + identifiers is a directed acyclic graph which when successful results + in an identifier that can be resolved to a release lockfile. + """ + raise NotImplementedError("Must be implemented by subclasses") + + def can_resolve_to_release_lockfile(self, package_identifier): + """ + Returns `True` or `False` as to whether this backend is capable of + resolving this identifier into a release lockfile for this package. + """ + return False + + def resolve_to_release_lockfile(self, package_identifier): + """ + Returns the release lockfile or raises + `populus.packages.exceptions.UnresolvablePackageIdentifier` if the + identifier cannot be resolved. + """ + raise NotImplementedError("Must be implemented by subclasses") + + def can_resolve_package_source_tree(self, release_lockfile): + return False + + def resolve_package_source_tree(self, release_lockfile): + raise NotImplementedError("Must be implemented by subclasses") + + # + # Write API primarily for publishing + # + def can_persist_package_file(self, file_path): + """ + Returns `True` or `False` as to whether this backend can persist the + provided file to whatever persistence source it uses. + """ + return False + + def persist_package_file(self, file_path): + """ + Persists the provided file to this backends persistence layer. + """ + raise NotImplementedError("Must be implemented by subclasses") + + def can_publish_release_lockfile(self, release_lockfile, release_lockfile_uri): + """ + Returns `True` or `False` as to whether this backend can publish this + release lockfile. + """ + return False + + def publish_release_lockfile(self, release_lockfile, release_lockfile_uri): + """ + Publishes the release lockfile. 
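+
+        Implementations may return a backend specific receipt, for example
+        the transaction hash when publishing to an on-chain package index.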
+ """ + raise NotImplementedError("Must be implemented by subclasses") diff --git a/populus/packages/backends/index.py b/populus/packages/backends/index.py new file mode 100644 index 00000000..5fb35c98 --- /dev/null +++ b/populus/packages/backends/index.py @@ -0,0 +1,211 @@ +import os +import json + +import semver + +from web3.contract import Contract + +from populus import ASSETS_DIR + +from populus.config import ( + Web3Config, +) +from populus.utils.packaging import ( + is_direct_package_identifier, + is_aliased_package_identifier, + parse_package_identifier, + filter_versions, + get_max_version, + is_package_name, + is_aliased_package_name, +) +from .base import ( + BasePackageBackend, +) + + +PACKAGE_INDEX_ABI_PATH = os.path.join(ASSETS_DIR, 'package_index_abi.json') + + +class BasePackageIndexFactory(Contract): + def lookup_release_lockfile_uri(self, package_name, version): + version_info = semver.parse_version_info(version) + return self.call().getReleaseLockfileURI( + name=package_name, + major=version_info.major, + minor=version_info.minor, + patch=version_info.patch, + preRelease=version_info.prerelease or '', + build=version_info.build or '', + ) + + def get_all_versions(self, package_name): + all_release_hashes = self.call().getAllPackageReleaseHashes(package_name) + all_release_data = tuple(( + self.call().getReleaseData(release_hash) + for release_hash in all_release_hashes + )) + all_versions = tuple(( + semver.format_version(major, minor, patch, prerelease or None, build or None) + for major, minor, patch, prerelease, build, _, _, _ + in all_release_data + )) + return all_versions + + def is_known_package_name(self, package_name): + return self.call().packageExists(package_name) + + def release(self, package_name, version, release_lockfile_uri, transaction=None): + version_info = semver.parse_version_info(version) + release_txn_hash = self.transact(transaction).release( + name=package_name, + major=version_info.major, + minor=version_info.minor, + patch=version_info.patch, + preRelease=version_info.prerelease or '', + build=version_info.build or '', + releaseLockfileURI=release_lockfile_uri, + ) + return release_txn_hash + + +class BasePackageIndexBackend(BasePackageBackend): + def can_translate_package_identifier(self, package_identifier): + is_named_package_identifier = any(( + is_direct_package_identifier(package_identifier), + is_aliased_package_identifier(package_identifier), + )) + + if not is_named_package_identifier: + return False + + package_name, _, _ = parse_package_identifier(package_identifier) + return self.is_known_package_name(package_name) + + def translate_package_identifier(self, package_identifier): + if is_package_name(package_identifier): + latest_version = self.get_latest_version(package_identifier) + return ( + '=='.join((package_identifier, latest_version)), + ) + elif is_aliased_package_name(package_identifier): + _, _, package_name = package_identifier.partition(':') + return ( + package_name, + ) + else: + latest_matching_version = self.get_latest_matching_version(package_identifier) + package_name, comparison, _ = parse_package_identifier(package_identifier) + + if comparison == '==': + return ( + self.get_release_lockfile_for_version(package_name, latest_matching_version), + ) + else: + return ( + '=='.join((package_name, latest_matching_version)), + ) + + def can_publish_release_lockfile(self, release_lockfile, release_lockfile_uri): + return True + + def get_latest_version(self, package_name): + all_versions = 
self.get_all_versions(package_name) + return get_max_version(all_versions) + + def get_latest_matching_version(self, package_identifier): + package_name, comparison, version = parse_package_identifier(package_identifier) + + if comparison is None and version is None: + return self.get_latest_version(package_name) + if comparison is None: + raise ValueError("Invariant") + if version is None: + raise ValueError("Invariant") + + all_versions = self.get_all_versions(package_name) + matching_versions = filter_versions(comparison, version, all_versions) + latest_matching_version = get_max_version(matching_versions) + + return latest_matching_version + + # + # Overide these API methods + # + def publish_release_lockfile(self, release_lockfile, release_lockfile_uri): + raise NotImplementedError("Must be implemented by subclasses") + + def get_all_versions(self, package_name): + raise NotImplementedError("Must be implemented by subclasses") + + def is_known_package_name(self, package_name): + raise NotImplementedError("Must be implemented by subclasses") + + def get_release_lockfile_for_version(self, package_name, version): + raise NotImplementedError("Must be implemented by subclasses") + + +class PackageIndexBackend(BasePackageIndexBackend): + package_index_for_install = None + package_index_for_publish = None + + def setup_backend(self): + if 'web3-for-install' in self.settings: + self.package_index_for_install = self.get_package_index_for_install() + if 'web3-for-publish' in self.settings: + self.package_index_for_publish = self.get_package_index_for_publish() + + def can_translate_package_identifier(self, package_identifier): + return 'web3-for-install' in self.settings + + def can_publish_release_lockfile(self, release_lockfile, release_lockfile_uri): + return 'web3-for-publish' in self.settings + + def publish_release_lockfile(self, release_lockfile, release_lockfile_uri): + publish_txn_hash = self.package_index_for_publish.release( + release_lockfile['package_name'], + release_lockfile['version'], + release_lockfile_uri, + ) + return publish_txn_hash + + def is_known_package_name(self, package_name): + return self.package_index_for_install.is_known_package_name(package_name) + + def get_all_versions(self, package_name): + return self.package_index_for_install.get_all_versions(package_name) + + def get_release_lockfile_for_version(self, package_name, version): + return self.package_index_for_install.lookup_release_lockfile_uri(package_name, version) + + # + # Internal API + # + def get_web3_for_install(self): + web3_config = self.settings.get_config('web3-for-install', config_class=Web3Config) + web3 = web3_config.get_web3() + return web3 + + def get_package_index_for_install(self): + PackageIndexFactory = self.get_package_index_factory(self.get_web3_for_install()) + package_index_address = self.settings['package_index_address'] + return PackageIndexFactory(address=package_index_address) + + def get_web3_for_publish(self): + web3_config = self.settings.get_config('web3-for-publish', config_class=Web3Config) + web3 = web3_config.get_web3() + return web3 + + def get_package_index_for_publish(self): + PackageIndexFactory = self.get_package_index_factory(self.get_web3_for_publish()) + package_index_address = self.settings['package_index_address'] + return PackageIndexFactory(address=package_index_address) + + def get_package_index_factory(self, web3): + with open(PACKAGE_INDEX_ABI_PATH) as package_index_abi_file: + package_index_abi = json.load(package_index_abi_file) + + return 
web3.eth.contract( + abi=package_index_abi, + ContractFactoryClass=BasePackageIndexFactory, + ) diff --git a/populus/packages/backends/ipfs.py b/populus/packages/backends/ipfs.py new file mode 100644 index 00000000..29ffd727 --- /dev/null +++ b/populus/packages/backends/ipfs.py @@ -0,0 +1,117 @@ +import json + +import ipfsapi + +from eth_utils import ( + force_text, + to_dict, +) + +from populus.packages.exceptions import ( + LockfileResolutionError, +) + +from populus.utils.ipfs import ( + is_ipfs_uri, + create_ipfs_uri, + extract_ipfs_path_from_uri, + walk_ipfs_tree, +) +from populus.utils.packaging import ( + is_aliased_ipfs_uri, +) + +from .base import ( + BasePackageBackend, +) + + +class BaseIPFSPackageBackend(BasePackageBackend): + def can_translate_package_identifier(self, package_identifier): + return is_aliased_ipfs_uri(package_identifier) + + def translate_package_identifier(self, package_identifier): + _, _, ipfs_uri = package_identifier.partition('@') + return ( + ipfs_uri, + ) + + def can_resolve_to_release_lockfile(self, package_identifier): + if is_ipfs_uri(package_identifier): + return True + return False + + def resolve_to_release_lockfile(self, package_identifier): + ipfs_path = extract_ipfs_path_from_uri(package_identifier) + + try: + lockfile_contents = self.get_file_from_ipfs(ipfs_path) + except ipfsapi.exceptions.StatusError as err: + raise LockfileResolutionError(str(err)) + + release_lockfile = json.loads(force_text(lockfile_contents)) + return release_lockfile + + def can_resolve_package_source_tree(self, release_lockfile): + sources = release_lockfile.get('sources') + if sources is None: + return False + return all( + is_ipfs_uri(value) for value in sources.values() + ) + + def can_persist_package_file(self, file_path): + return True + + def persist_package_file(self, file_path): + """ + Persists the provided file to this backends persistence layer. + """ + ipfs_file_hash = self.push_file_to_ipfs(file_path) + ipfs_uri = create_ipfs_uri(ipfs_file_hash) + return ipfs_uri + + # + # Subclass API + # + def push_file_to_ipfs(self, file_path): + raise NotImplementedError("Must be implemented by subclasses") + + def get_file_from_ipfs(self, ipfs_path): + raise NotImplementedError("Must be implemented by subclasses") + + +class IPFSPackageBackend(BaseIPFSPackageBackend): + """ + Package backend that resolves IPFS URIs + """ + def setup_backend(self): + ipfs_host = self.settings['host'] + ipfs_port = self.settings['port'] + self.ipfs_client = ipfsapi.connect(ipfs_host, ipfs_port) + + @to_dict + def resolve_package_source_tree(self, release_lockfile): + sources = release_lockfile['sources'] + + for source_path, source_value in sources.items(): + if is_ipfs_uri(source_value): + ipfs_path = extract_ipfs_path_from_uri(source_value) + ipfs_source_tree = walk_ipfs_tree(self.ipfs_client, ipfs_path, source_path) + for sub_path, source_hash in ipfs_source_tree.items(): + source_content = self.ipfs_client.cat(source_hash) + yield sub_path, source_content + else: + yield source_path, source_value + + def push_file_to_ipfs(self, file_path): + """ + Persists the provided file to this backends persistence layer. 
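+
+        Returns the IPFS hash of the added file (the `Hash` field reported
+        by `ipfs add`).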
+ """ + result = self.ipfs_client.add(file_path) + ipfs_file_hash = result['Hash'] + return ipfs_file_hash + + def get_file_from_ipfs(self, ipfs_path): + file_contents = self.ipfs_client.cat(ipfs_path) + return file_contents diff --git a/populus/packages/backends/lockfile.py b/populus/packages/backends/lockfile.py new file mode 100644 index 00000000..32732c2c --- /dev/null +++ b/populus/packages/backends/lockfile.py @@ -0,0 +1,38 @@ +from populus.utils.packaging import ( + is_filesystem_release_lockfile_path, + is_aliased_filesystem_release_lockfile_path, + load_release_lockfile, +) + +from .base import ( + BasePackageBackend, +) + + +class LocalFilesystemLockfileBackend(BasePackageBackend): + """ + Backend for package installation that can be used to install the current package. + """ + def can_translate_package_identifier(self, package_identifier): + return is_aliased_filesystem_release_lockfile_path(package_identifier) + + def translate_package_identifier(self, package_identifier): + if is_aliased_filesystem_release_lockfile_path(package_identifier): + _, _, release_lockfile_path = package_identifier.partition('@') + return ( + release_lockfile_path, + ) + else: + raise ValueError("Unsupported identifier: {0}".format(package_identifier)) + + def can_resolve_to_release_lockfile(self, package_identifier): + return is_filesystem_release_lockfile_path(package_identifier) + + def resolve_to_release_lockfile(self, package_identifier): + if is_filesystem_release_lockfile_path(package_identifier): + release_lockfile_path = package_identifier + else: + raise ValueError("Unsupported identifier: {0}".format(package_identifier)) + + release_lockfile = load_release_lockfile(release_lockfile_path) + return release_lockfile diff --git a/populus/packages/backends/manifest.py b/populus/packages/backends/manifest.py new file mode 100644 index 00000000..33f15602 --- /dev/null +++ b/populus/packages/backends/manifest.py @@ -0,0 +1,26 @@ +from populus.utils.packaging import ( + is_local_project_package_identifier, + construct_package_identifier, +) + +from .base import ( + BasePackageBackend, +) + + +class LocalManifestBackend(BasePackageBackend): + """ + Backend for package installation that can be used to install the current package. 
+ """ + def can_translate_package_identifier(self, package_identifier): + return is_local_project_package_identifier( + self.project.project_dir, + package_identifier, + ) + + def translate_package_identifier(self, package_identifier): + return tuple(( + construct_package_identifier(dependency_name, identifier) + for dependency_name, identifier + in self.project.dependencies.items() + )) diff --git a/populus/packages/build.py b/populus/packages/build.py new file mode 100644 index 00000000..e9b75b31 --- /dev/null +++ b/populus/packages/build.py @@ -0,0 +1,217 @@ +import os +import itertools + +from eth_utils import ( + to_dict, +) + +from populus.utils.chains import ( + get_chain_definition, +) +from populus.utils.contracts import ( + is_contract_name, + EMPTY_BYTECODE_VALUES, +) +from populus.utils.dependencies import ( + extract_build_dependendencies_from_installed_packages, +) +from populus.utils.linking import ( + find_link_references, +) +from populus.utils.mappings import ( + deep_merge_dicts, +) +from populus.utils.packaging import ( + validate_package_manifest, + persist_package_file, +) + + +@to_dict +def construct_release_lockfile(project, + chain_names, + contract_instance_names, + contract_type_names): + if not project.has_package_manifest: + raise ValueError("No package manifest found in project") + + package_manifest = project.package_manifest + validate_package_manifest(package_manifest) + + yield 'lockfile_version', '1' + yield 'package_name', package_manifest['package_name'] + yield 'version', package_manifest['version'] + + package_backends = project.package_backends + + source_file_uris = { + os.path.join('.', file_path): persist_package_file(file_path, package_backends) + for file_path in project.contract_source_paths + } + if source_file_uris: + yield 'sources', source_file_uris + + deployments = construct_deployments(project, chain_names, contract_instance_names) + if deployments: + yield 'deployments', deployments + + package_meta = construct_package_meta_data(package_manifest) + if package_meta: + yield 'meta', package_meta + + # TODO: check if there are discrepancies between what is *supposed* to be + # installed and what is and figure out how to resolve them. 
+ construct_dependencies = construct_build_dependencies( + project.installed_packages_dir, + project.dependencies, + ) + if construct_dependencies: + yield 'build_dependencies', construct_dependencies + + contract_types_names_from_deployments = { + contract_instance['contract_type'] + for deployed_instances in deployments.values() + for contract_instance in deployed_instances.values() + if is_contract_name(contract_instance['contract_type']) + } + all_contract_type_names = tuple(sorted(set(itertools.chain( + contract_type_names, + contract_types_names_from_deployments, + )))) + + contract_types = construct_contract_types( + project.compiled_contract_data, + all_contract_type_names, + ) + if contract_types: + yield 'contract_types', contract_types + + +@to_dict +def construct_deployments(project, chain_names, contract_instance_names): + for chain_name in chain_names: + with project.get_chain(chain_name) as chain: + chain_definition = get_chain_definition(chain.web3) + provider = chain.provider + deployed_contract_instances = construct_deployments_object( + provider, + contract_instance_names, + ) + yield chain_definition, deployed_contract_instances + + +@to_dict +def construct_build_dependencies(installed_packages_dir, project_dependencies): + installed_dependencies = extract_build_dependendencies_from_installed_packages( + installed_packages_dir, + ) + for dependency_name, dependency_identifier in installed_dependencies.items(): + if dependency_name in project_dependencies: + yield dependency_name, dependency_identifier + + +@to_dict +def construct_contract_types(compiled_contract_data, contract_type_names): + for contract_type_name in contract_type_names: + contract_data = compiled_contract_data[contract_type_name] + contract_type_object = construct_contract_type_object( + contract_data, + contract_type_name, + ) + yield contract_type_name, contract_type_object + + +@to_dict +def construct_package_meta_data(package_manifest): + if 'authors' in package_manifest: + yield 'authors', package_manifest['authors'] + if 'license' in package_manifest: + yield 'license', package_manifest['license'] + if 'description' in package_manifest: + yield 'description', package_manifest['description'] + if 'keywords' in package_manifest: + yield 'keywords', package_manifest['keywords'] + if 'links' in package_manifest: + yield 'links', package_manifest['links'] + + +@to_dict +def construct_deployed_contract_instance(provider, + contract_name): + contract_instance = provider.get_contract(contract_name) + base_contract_factory = provider.get_base_contract_factory(contract_name) + + yield 'contract_type', contract_instance.populus_meta.contract_type_name + yield 'address', contract_instance.address + + runtime_bytecode = base_contract_factory.bytecode_runtime + if runtime_bytecode not in EMPTY_BYTECODE_VALUES: + yield 'runtime_bytecode', runtime_bytecode + + link_references = find_link_references( + runtime_bytecode, + provider.get_all_contract_names(), + ) + + # TODO: scrape all installed package manifests for the names of deployed + # contracts who's contract class name matches this reference. 
+    link_dependencies = tuple(
+        construct_link_value(provider, link_reference)
+        for link_reference
+        in link_references
+    )
+
+    if link_dependencies:
+        yield 'link_dependencies', link_dependencies
+
+
+@to_dict
+def construct_link_value(provider, link_reference):
+    yield 'offset', link_reference.offset
+    link_reference_contract_instance = provider.get_contract_factory(link_reference.full_name)
+    yield 'value', link_reference_contract_instance.populus_meta.contract_type_name
+
+
+@to_dict
+def construct_deployments_object(provider, contract_names_to_include):
+    for contract_name in contract_names_to_include:
+        deployed_contract_instance = construct_deployed_contract_instance(
+            provider,
+            contract_name,
+        )
+        yield contract_name, deployed_contract_instance
+
+
+@to_dict
+def construct_contract_type_object(contract_data,
+                                   contract_type_name):
+    yield 'contract_name', contract_type_name
+
+    if contract_data.get('bytecode') not in EMPTY_BYTECODE_VALUES:
+        yield 'bytecode', contract_data['bytecode']
+
+    if contract_data.get('bytecode_runtime') not in EMPTY_BYTECODE_VALUES:
+        yield 'runtime_bytecode', contract_data['bytecode_runtime']
+
+    if 'abi' in contract_data:
+        yield 'abi', contract_data['abi']
+
+    if 'userdoc' in contract_data or 'devdoc' in contract_data:
+        natspec = deep_merge_dicts(
+            contract_data.get('userdoc', {}),
+            contract_data.get('devdoc', {}),
+        )
+        yield 'natspec', natspec
+
+    if 'runtime_bytecode' in contract_data or 'bytecode' in contract_data:
+        yield 'compiler', construct_compiler_object(contract_data['metadata'])
+
+
+@to_dict
+def construct_compiler_object(metadata):
+    yield 'type', 'solc'
+    yield 'version', metadata['compiler']['version']
+    yield 'settings', {
+        'optimize': metadata['settings']['optimizer']['enabled'],
+        'optimize_runs': metadata['settings']['optimizer']['runs'],
+    }
diff --git a/populus/packages/exceptions.py b/populus/packages/exceptions.py
new file mode 100644
index 00000000..890eba2b
--- /dev/null
+++ b/populus/packages/exceptions.py
@@ -0,0 +1,10 @@
+class UnsupportedPackageIdentifier(Exception):
+    pass
+
+
+class UnresolvablePackageIdentifier(Exception):
+    pass
+
+
+class LockfileResolutionError(Exception):
+    pass
diff --git a/populus/packages/installation.py b/populus/packages/installation.py
new file mode 100644
index 00000000..1db75d4a
--- /dev/null
+++ b/populus/packages/installation.py
@@ -0,0 +1,192 @@
+import os
+import json
+import shutil
+
+from eth_utils import (
+    is_bytes,
+    to_tuple,
+)
+
+from populus.utils.dependencies import (
+    get_build_identifier_lockfile_path,
+    get_dependency_base_dir,
+    get_install_identifier_lockfile_path,
+    get_installed_packages_dir,
+    get_release_lockfile_path,
+)
+from populus.utils.filesystem import (
+    ensure_file_exists,
+    ensure_path_exists,
+    is_under_path,
+    remove_dir_if_exists,
+    tempdir,
+)
+from populus.utils.packaging import (
+    compute_identifier_tree,
+    construct_dependency_identifier,
+    flatten_identifier_tree,
+    recursively_resolve_package_data,
+)
+
+
+def install_packages_to_project(installed_packages_dir, package_identifiers, package_backends):
+    """
+    1. Recursively resolve all dependencies.
+    2. Filter out any dependencies that are already met.
+    3. Write dependencies to the filesystem.
+
+    Maybe:
+    - check all chain identifiers are found on the current chain.
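+
+    Example call (identifiers are illustrative):
+
+        install_packages_to_project(
+            project.installed_packages_dir,
+            ('owned', 'safe-math-lib==1.0.0'),
+            project.package_backends,
+        )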
+ """ + identifier_tree = compute_identifier_tree( + package_identifiers, + package_backends, + ) + flattened_identifier_tree = flatten_identifier_tree( + identifier_tree, + ) + package_data_to_install = tuple( + recursively_resolve_package_data(package_identifier_lineage, package_backends) + for package_identifier_lineage + in flattened_identifier_tree + ) + # TODO: Filter out dependencies that are already satisfied. + # TODO: Detect duplicate dependency names + installed_packages = write_installed_packages( + installed_packages_dir, + package_data_to_install, + ) + + return installed_packages + + +@to_tuple +def write_installed_packages(installed_packages_dir, package_data_to_install): + with tempdir() as temporary_dir: + temp_installed_packages_dir = get_installed_packages_dir(temporary_dir) + + if os.path.exists(installed_packages_dir): + shutil.copytree(installed_packages_dir, temp_installed_packages_dir) + else: + ensure_path_exists(temp_installed_packages_dir) + + sorted_package_data_to_install = sorted( + package_data_to_install, + key=lambda pd: pd['meta']['dependency_name'] + ) + + for package_data in sorted_package_data_to_install: + write_package_files(temp_installed_packages_dir, package_data) + yield package_data + else: + # Upon successful writing of all dependencies, move + remove_dir_if_exists(installed_packages_dir) + shutil.move(temp_installed_packages_dir, installed_packages_dir) + + +def write_package_files(installed_packages_dir, package_data): + with tempdir() as temporary_dir: + package_meta = package_data['meta'] + + dependency_name = package_meta['dependency_name'] + + # Compute the location the package should be installed to. + dependency_base_dir = get_dependency_base_dir( + installed_packages_dir, + dependency_name, + ) + + # Setup a temporary location to write files. + temp_install_location = get_dependency_base_dir( + temporary_dir, + dependency_name, + ) + ensure_path_exists(temp_install_location) + + # Write the package source tree. + package_source_tree = package_data['source_tree'] + for rel_source_path, source_content in package_source_tree.items(): + source_path = os.path.join(temp_install_location, rel_source_path) + if not is_under_path(temp_install_location, source_path): + raise ValueError( + "Package is attempting to write files outside of the " + "installation directory.\n'{0}'".format(rel_source_path) + ) + ensure_file_exists(source_path) + mode = 'wb' if is_bytes(source_content) else 'w' + with open(source_path, mode) as source_file: + source_file.write(source_content) + + # Write the `lock.json` lockfile + if package_data['lockfile'] is not None: + lockfile_path = get_release_lockfile_path( + temp_install_location, + ) + with open(lockfile_path, 'w') as lockfile_file: + lockfile_file.write(json.dumps( + package_data['lockfile'], + indent=2, + sort_keys=True, + )) + + # Write the `build_identifier.lock` lockfile + build_identifier_lockfile_path = get_build_identifier_lockfile_path( + temp_install_location, + ) + with open(build_identifier_lockfile_path, 'w') as build_identifier_lockfile_file: + build_identifier_lockfile_file.write(package_meta['build_identifier']) + + # Write the `install_identifier.lock` lockfile + install_identifier_lockfile_path = get_install_identifier_lockfile_path( + temp_install_location, + ) + with open(install_identifier_lockfile_path, 'w') as install_identifier_lockfile_file: + install_identifier_lockfile_file.write(package_meta['install_identifier']) + + # Now recursively write dependency packages. 
+ installed_packages_dir_for_dependencies = get_installed_packages_dir( + temp_install_location, + ) + write_installed_packages( + installed_packages_dir_for_dependencies, + package_data['dependencies'], + ) + + # Upon successful writing of all dependencies move the fully installed + # package dir to the real installed_packages location. + remove_dir_if_exists(dependency_base_dir) + shutil.move(temp_install_location, dependency_base_dir) + return dependency_base_dir + + +def update_project_dependencies(project, installed_dependencies): + if not project.has_package_manifest: + with open(project.package_manifest_path, 'w') as package_manifest_file: + json.dump({}, package_manifest_file) + + package_manifest = project.package_manifest + package_manifest.setdefault('dependencies', {}) + + for package_data in installed_dependencies: + package_meta = package_data['meta'] + + dependency_name = package_meta['dependency_name'] + install_identifier = package_meta['install_identifier'] + build_identifier = package_meta['build_identifier'] + + dependency_identifier = construct_dependency_identifier( + dependency_name, + install_identifier, + build_identifier, + ) + + package_manifest['dependencies'][dependency_name] = dependency_identifier + + with open(project.package_manifest_path, 'w') as package_manifest_file: + json.dump( + package_manifest, + package_manifest_file, + sort_keys=True, + indent=2, + separators=(',', ': '), + ) diff --git a/populus/pb/__init__.py b/populus/pb/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/populus/pb/ipfs_file_pb2.py b/populus/pb/ipfs_file_pb2.py new file mode 100644 index 00000000..d69c2d0b --- /dev/null +++ b/populus/pb/ipfs_file_pb2.py @@ -0,0 +1,228 @@ +# flake8: noqa +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: file.proto + +import sys +_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database +from google.protobuf import descriptor_pb2 +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor.FileDescriptor( + name='file.proto', + package='', + syntax='proto2', + serialized_pb=_b('\n\nfile.proto\"\xa1\x01\n\x04\x44\x61ta\x12\x1c\n\x04Type\x18\x01 \x02(\x0e\x32\x0e.Data.DataType\x12\x0c\n\x04\x44\x61ta\x18\x02 \x01(\x0c\x12\x10\n\x08\x66ilesize\x18\x03 \x01(\x04\x12\x12\n\nblocksizes\x18\x04 \x03(\x04\"G\n\x08\x44\x61taType\x12\x07\n\x03Raw\x10\x00\x12\r\n\tDirectory\x10\x01\x12\x08\n\x04\x46ile\x10\x02\x12\x0c\n\x08Metadata\x10\x03\x12\x0b\n\x07Symlink\x10\x04\"3\n\x06PBLink\x12\x0c\n\x04Hash\x18\x01 \x01(\x0c\x12\x0c\n\x04Name\x18\x02 \x01(\t\x12\r\n\x05Tsize\x18\x03 \x01(\x04\".\n\x06PBNode\x12\x16\n\x05Links\x18\x02 \x03(\x0b\x32\x07.PBLink\x12\x0c\n\x04\x44\x61ta\x18\x01 \x01(\x0c') +) +_sym_db.RegisterFileDescriptor(DESCRIPTOR) + + + +_DATA_DATATYPE = _descriptor.EnumDescriptor( + name='DataType', + full_name='Data.DataType', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='Raw', index=0, number=0, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='Directory', index=1, number=1, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='File', index=2, number=2, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='Metadata', index=3, number=3, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='Symlink', index=4, number=4, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=105, + serialized_end=176, +) +_sym_db.RegisterEnumDescriptor(_DATA_DATATYPE) + + +_DATA = _descriptor.Descriptor( + name='Data', + full_name='Data', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='Type', full_name='Data.Type', index=0, + number=1, type=14, cpp_type=8, label=2, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='Data', full_name='Data.Data', index=1, + number=2, type=12, cpp_type=9, label=1, + has_default_value=False, default_value=_b(""), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='filesize', full_name='Data.filesize', index=2, + number=3, type=4, cpp_type=4, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='blocksizes', full_name='Data.blocksizes', index=3, + number=4, type=4, cpp_type=4, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + _DATA_DATATYPE, + ], + options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=15, + 
serialized_end=176, +) + + +_PBLINK = _descriptor.Descriptor( + name='PBLink', + full_name='PBLink', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='Hash', full_name='PBLink.Hash', index=0, + number=1, type=12, cpp_type=9, label=1, + has_default_value=False, default_value=_b(""), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='Name', full_name='PBLink.Name', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='Tsize', full_name='PBLink.Tsize', index=2, + number=3, type=4, cpp_type=4, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=178, + serialized_end=229, +) + + +_PBNODE = _descriptor.Descriptor( + name='PBNode', + full_name='PBNode', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='Links', full_name='PBNode.Links', index=0, + number=2, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='Data', full_name='PBNode.Data', index=1, + number=1, type=12, cpp_type=9, label=1, + has_default_value=False, default_value=_b(""), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=231, + serialized_end=277, +) + +_DATA.fields_by_name['Type'].enum_type = _DATA_DATATYPE +_DATA_DATATYPE.containing_type = _DATA +_PBNODE.fields_by_name['Links'].message_type = _PBLINK +DESCRIPTOR.message_types_by_name['Data'] = _DATA +DESCRIPTOR.message_types_by_name['PBLink'] = _PBLINK +DESCRIPTOR.message_types_by_name['PBNode'] = _PBNODE + +Data = _reflection.GeneratedProtocolMessageType('Data', (_message.Message,), dict( + DESCRIPTOR = _DATA, + __module__ = 'file_pb2' + # @@protoc_insertion_point(class_scope:Data) + )) +_sym_db.RegisterMessage(Data) + +PBLink = _reflection.GeneratedProtocolMessageType('PBLink', (_message.Message,), dict( + DESCRIPTOR = _PBLINK, + __module__ = 'file_pb2' + # @@protoc_insertion_point(class_scope:PBLink) + )) +_sym_db.RegisterMessage(PBLink) + +PBNode = _reflection.GeneratedProtocolMessageType('PBNode', (_message.Message,), dict( + DESCRIPTOR = _PBNODE, + __module__ = 'file_pb2' + # @@protoc_insertion_point(class_scope:PBNode) + )) +_sym_db.RegisterMessage(PBNode) + + +# @@protoc_insertion_point(module_scope) diff --git a/populus/project.py b/populus/project.py index 30c7f013..5ea57bab 100644 --- a/populus/project.py +++ b/populus/project.py @@ -1,5 +1,10 @@ -import os import itertools +import json +import os + +from eth_utils import ( + to_dict, +) from populus.compilation import ( compile_project_contracts, @@ -20,17 
+25,33 @@ get_build_asset_dir, get_compiled_contracts_asset_path, get_contracts_source_dir, + get_dependency_source_paths, get_project_source_paths, get_test_source_paths, ) -from populus.utils.filesystem import ( - relpath, - get_latest_mtime, -) from populus.utils.config import ( check_if_json_config_file_exists, get_default_project_config_file_path, get_json_config_file_path, + sort_prioritized_configs, +) +from populus.utils.dependencies import ( + recursive_find_installed_dependency_base_dirs, + get_installed_dependency_locations, + get_installed_packages_dir, +) +from populus.utils.filesystem import ( + get_latest_mtime, + relpath, +) +from populus.utils.functional import ( + cached_property, +) +from populus.utils.module_loading import ( + import_string, +) +from populus.utils.packaging import ( + get_project_package_manifest_path, ) from populus.utils.testing import ( get_tests_dir, @@ -123,7 +144,68 @@ def tests_dir(self): return get_tests_dir(self.project_dir) # - # Contracts + # Packaging: Manifest + # + @property + def has_package_manifest(self): + return os.path.exists(self.package_manifest_path) + + @property + @relpath + def package_manifest_path(self): + return get_project_package_manifest_path(self.project_dir) + + @property + def package_manifest(self): + with open(self.package_manifest_path) as package_manifest_file: + return json.load(package_manifest_file) + + # + # Packaging: Installed Packages + # + @property + def dependencies(self): + if self.has_package_manifest: + package_manifest = self.package_manifest + else: + package_manifest = {} + package_dependencies = package_manifest.get('dependencies', {}) + return package_dependencies + + @property + @relpath + def installed_packages_dir(self): + return get_installed_packages_dir(self.project_dir) + + @property + @to_dict + def installed_dependency_locations(self): + # TODO: rename to `installed_dependency_locations` + return get_installed_dependency_locations(self.installed_packages_dir) + + # + # Packaging: Backends + # + @to_dict + def get_package_backend_config(self): + package_backend_config = self.config.get_config('packaging.backends') + return sort_prioritized_configs(package_backend_config, self.config) + + @cached_property + @to_dict + def package_backends(self): + for backend_name, backend_config in self.get_package_backend_config().items(): + PackageBackendClass = import_string(backend_config['class']) + yield ( + backend_name, + PackageBackendClass( + self, + backend_config.get_config('settings'), + ), + ) + + # + # Contract Source and Compilation # @property @relpath @@ -138,6 +220,10 @@ def contracts_source_dir(self): get_contracts_source_dir(self.project_dir), ) + @property + def contract_source_paths(self): + return get_project_source_paths(self.contracts_source_dir) + @property @relpath def build_asset_dir(self): @@ -147,9 +233,16 @@ def build_asset_dir(self): _cached_compiled_contracts = None def get_all_source_file_paths(self): + dependency_source_paths = tuple(itertools.chain.from_iterable( + get_dependency_source_paths(dependency_base_dir) + for dependency_base_dir + in recursive_find_installed_dependency_base_dirs(self.installed_packages_dir) + )) + return tuple(itertools.chain( get_project_source_paths(self.contracts_source_dir), get_test_source_paths(self.tests_dir), + dependency_source_paths, )) def is_compiled_contract_cache_stale(self): diff --git a/populus/utils/cli.py b/populus/utils/cli.py index 9f9bdb4b..1832f426 100644 --- a/populus/utils/cli.py +++ b/populus/utils/cli.py @@ -126,11 
+126,12 @@ def configure_chain(project, chain_name): logger.info('-' * len(start_msg)) if is_existing_chain: + # TODO: this should probably show flattened out config keys current_configuration_msg = "\n".join(itertools.chain(( "Current Configuration", ), ( " {key} = {value}".format(key=key, value=value) - for key, value in chain_config.items() + for key, value in chain_config.items() # TODO: Config.items() doesn't exist. ))) logger.info(current_configuration_msg) @@ -267,10 +268,18 @@ def deploy_contract_and_verify(chain, Deploy a contract, displaying information about the deploy process as it happens. This also verifies that the deployed contract's bytecode matches the expected value. + + TODO: the `ContractFactory` keyword here is special in that it is only + present so that this can be used to deploy the `Registrar`. It seems like + the `Registrar` should just be merged into the available contract + factories, or even be a *special* contract in which case it should be given + a different name. """ - web3 = chain.web3 logger = logging.getLogger('populus.utils.cli.deploy_contract_and_verify') + web3 = chain.web3 + provider = chain.provider + if is_account_locked(web3, web3.eth.defaultAccount or web3.eth.coinbase): try: chain.wait.for_unlock(web3.eth.defaultAccount or web3.eth.coinbase, 5) @@ -283,7 +292,7 @@ def deploy_contract_and_verify(chain, logger.info("Deploying {0}".format(contract_name)) if ContractFactory is None: - ContractFactory = chain.provider.get_contract_factory(contract_name) + ContractFactory = provider.get_contract_factory(contract_name) deploy_txn_hash = ContractFactory.deploy( transaction=deploy_transaction, diff --git a/populus/utils/compile.py b/populus/utils/compile.py index ec4c4a27..638fce98 100644 --- a/populus/utils/compile.py +++ b/populus/utils/compile.py @@ -19,6 +19,8 @@ ) from eth_utils import ( + add_0x_prefix, + to_dict, to_tuple, is_string, ) @@ -36,12 +38,21 @@ compute_deploy_order, ) from .filesystem import ( + is_same_path, recursive_find_files, ensure_file_exists, ) from .json import ( normalize_object_for_json, ) +from .functional import ( + star_zip_return, +) +from .dependencies import ( + get_installed_dependency_locations, + get_installed_packages_dir, + recursive_find_installed_dependency_base_dirs, +) DEFAULT_CONTRACTS_DIR = "./contracts/" @@ -71,6 +82,9 @@ def get_compiled_contracts_asset_path(build_asset_dir): return compiled_contracts_asset_path +EXCLUDE_INSTALLED_PACKAGES_GLOB = "./installed_packages/*" + + @to_tuple def find_solidity_source_files(base_dir): return ( @@ -90,14 +104,144 @@ def get_test_source_paths(tests_dir): return test_source_paths +@to_tuple +def get_dependency_source_paths(dependency_base_dir): + """ + Find all of the solidity source files for the given dependency, excluding + any of the source files that belong to any sub-dependencies. 
+ """ + source_files_to_exclude = recursive_find_files( + dependency_base_dir, + EXCLUDE_INSTALLED_PACKAGES_GLOB, + ) + for source_file_path in find_solidity_source_files(dependency_base_dir): + for exclude_path in source_files_to_exclude: + if is_same_path(source_file_path, exclude_path): + continue + yield source_file_path + + +@to_tuple +def compute_import_remappings(source_paths, installed_dependency_locations): + source_and_remapping_pairs = itertools.product( + sorted(source_paths), + sorted(installed_dependency_locations.items()), + ) + + for import_path, (package_name, package_source_dir) in source_and_remapping_pairs: + yield "{import_path}:{package_name}={package_source_dir}".format( + import_path=import_path, + package_name=package_name, + package_source_dir=package_source_dir, + ) + + +def compute_project_compilation_arguments(contracts_source_dir, + installed_packages_dir): + project_source_paths = get_project_source_paths(contracts_source_dir) + + installed_dependency_locations = get_installed_dependency_locations( + installed_packages_dir, + ) + + project_import_remappings = compute_import_remappings( + project_source_paths, + installed_dependency_locations, + ) + return project_source_paths, project_import_remappings + + +def compute_test_compilation_arguments(tests_dir, + installed_packages_dir): + test_source_paths = get_project_source_paths(tests_dir) + + installed_dependency_locations = get_installed_dependency_locations( + installed_packages_dir, + ) + + test_import_remappings = compute_import_remappings( + test_source_paths, + installed_dependency_locations, + ) + return test_source_paths, test_import_remappings + + +@star_zip_return +@to_tuple +def compute_installed_packages_compilation_arguments(installed_packages_dir): + all_dependency_base_dirs = recursive_find_installed_dependency_base_dirs( + installed_packages_dir, + ) + + for dependency_base_dir in all_dependency_base_dirs: + ( + dependency_source_paths, + dependency_import_remappings, + ) = compute_dependency_compilation_arguments(dependency_base_dir) + yield dependency_source_paths, dependency_import_remappings + + +def compute_dependency_compilation_arguments(dependency_base_dir): + dependency_source_paths = get_dependency_source_paths(dependency_base_dir) + dependency_installed_packages_dir = get_installed_packages_dir(dependency_base_dir) + + installed_sub_dependencies = get_installed_dependency_locations( + dependency_installed_packages_dir, + ) + + dependency_import_remappings = compute_import_remappings( + dependency_source_paths, + installed_sub_dependencies, + ) + return dependency_source_paths, dependency_import_remappings + + +def _load_json_if_string(value): + if is_string(value): + return json.loads(value) + else: + return value + + +@to_dict +def normalize_contract_data(contract_data): + if 'metadata' in contract_data: + yield 'metadata', normalize_contract_metadata(contract_data['metadata']) + if 'bin' in contract_data: + yield 'bytecode', add_0x_prefix(contract_data['bin']) + if 'bin-runtime' in contract_data: + yield 'bytecode_runtime', add_0x_prefix(contract_data['bin-runtime']) + if 'abi' in contract_data: + yield 'abi', _load_json_if_string(contract_data['abi']) + if 'userdoc' in contract_data: + yield 'userdoc', _load_json_if_string(contract_data['userdoc']) + if 'devdoc' in contract_data: + yield 'devdoc', _load_json_if_string(contract_data['devdoc']) + + +def process_compiler_output(name_from_compiler, data_from_compiler): + _, _, contract_name = name_from_compiler.rpartition(':') + 
contract_data = normalize_contract_data(data_from_compiler) + return contract_name, contract_data + + +def normalize_contract_metadata(metadata): + if not metadata: + return None + elif is_string(metadata): + return json.loads(metadata) + else: + raise ValueError("Unknown metadata format '{0}'".format(metadata)) + + def write_compiled_sources(compiled_contracts_asset_path, compiled_sources): logger = logging.getLogger('populus.compilation.write_compiled_sources') ensure_file_exists(compiled_contracts_asset_path) - with open(compiled_contracts_asset_path, 'w') as outfile: + with open(compiled_contracts_asset_path, 'w') as compiled_contracts_asset_file: json.dump( normalize_object_for_json(compiled_sources), - outfile, + compiled_contracts_asset_file, sort_keys=True, indent=4, separators=(',', ': '), @@ -161,15 +305,6 @@ def load_json_if_string(value): return value -def normalize_contract_metadata(metadata): - if not metadata: - return None - elif is_string(metadata): - return json.loads(metadata) - else: - raise ValueError("Unknown metadata format '{0}'".format(metadata)) - - V1 = 'v1' diff --git a/populus/utils/contracts.py b/populus/utils/contracts.py index be3a4106..88c521e4 100644 --- a/populus/utils/contracts.py +++ b/populus/utils/contracts.py @@ -12,6 +12,9 @@ from .string import ( normalize_class_name, ) +from .packaging import ( + is_package_name, +) def is_project_contract(contracts_source_dir, contract_data): @@ -22,6 +25,10 @@ def is_test_contract(tests_dir, contract_data): return is_under_path(tests_dir, contract_data['source_path']) +def is_dependency_contract(project_installed_packages_dir, contract_data): + return is_under_path(project_installed_packages_dir, contract_data['source_path']) + + def package_contracts(contract_factories): _dict = { '__len__': lambda s: len(contract_factories), @@ -114,6 +121,40 @@ def is_contract_name(value): return bool(re.match(CONTRACT_NAME_REGEX, value)) +def is_dependency_contract_name(value): + dependency_name, _, contract_name = value.partition(':') + if not is_package_name(dependency_name): + return False + elif not is_contract_name(contract_name): + return False + else: + return True + + +@to_dict +def map_contracts_to_source_location(compiled_contract_data, source_locations): + source_locations_by_depth = sorted(source_locations, reverse=True) + contracts_by_source_path = { + contract_name: contract_data['source_path'] + for contract_name, contract_data + in compiled_contract_data.items() + } + for contract_name, contract_source_path in contracts_by_source_path.items(): + for source_location in source_locations_by_depth: + if is_under_path(source_location, contract_source_path): + yield (contract_name, source_location) + break + else: + raise ValueError( + "Contract `{0}` from source path `{1}` could not be mapped to " + "any of the following source locations:\n- {2}".format( + contract_name, + contract_source_path, + '\n- '.join(source_locations_by_depth), + ) + ) + + EMPTY_BYTECODE_VALUES = {None, "0x"} diff --git a/populus/utils/dependencies.py b/populus/utils/dependencies.py new file mode 100644 index 00000000..577c3452 --- /dev/null +++ b/populus/utils/dependencies.py @@ -0,0 +1,196 @@ +import os +import itertools + +from eth_utils import ( + to_tuple, + to_dict, + to_ordered_dict, +) + +from .filesystem import ( + is_under_path, + normpath, +) + + +INSTALLED_PACKAGES_BASE_DIRNAME = './installed_packages' + + +@normpath +def get_installed_packages_dir(base_dir): + """ + Returns the `./installed_packages` directory for the given 
`base_dir`.
+    """
+    return os.path.join(base_dir, INSTALLED_PACKAGES_BASE_DIRNAME)
+
+
+@normpath
+def get_dependency_base_dir(installed_packages_dir, dependency_name):
+    """
+    Returns the directory within `./installed_packages` that the dependency
+    would be installed to.
+    """
+    dependency_base_dir = os.path.join(installed_packages_dir, dependency_name)
+    return dependency_base_dir
+
+
+def is_dependency_base_dir(directory_path):
+    """
+    Returns boolean whether the given directory looks like an installed
+    dependency, i.e. whether it contains a release lockfile.
+    """
+    release_lockfile_path = get_release_lockfile_path(directory_path)
+    return os.path.exists(release_lockfile_path)
+
+
+def extract_dependency_name_from_base_dir(dependency_base_dir):
+    """
+    Extract the dependency name from the directory where the dependency is
+    installed.
+    """
+    return os.path.basename(dependency_base_dir.rstrip('/'))
+
+
+RELEASE_LOCKFILE_FILENAME = 'lock.json'
+
+
+@normpath
+def get_release_lockfile_path(dependency_base_dir):
+    """
+    Returns the file path of the release lockfile within the given dependency
+    base dir.
+    """
+    return os.path.join(dependency_base_dir, RELEASE_LOCKFILE_FILENAME)
+
+
+INSTALL_IDENTIFIER_LOCKFILE_NAME = 'install_identifier.lock'
+
+
+@normpath
+def get_install_identifier_lockfile_path(dependency_base_dir):
+    """
+    Returns file path where the root identifier for the installed dependency is stored.
+    """
+    install_identifier_lockfile_path = os.path.join(
+        dependency_base_dir,
+        INSTALL_IDENTIFIER_LOCKFILE_NAME,
+    )
+    return install_identifier_lockfile_path
+
+
+def get_install_identifier(dependency_base_dir):
+    """
+    Gets the install_identifier from the translated identifier lockfile
+    within a dependency's base dir.
+    """
+    install_identifier_lockfile_path = get_install_identifier_lockfile_path(dependency_base_dir)
+    with open(install_identifier_lockfile_path) as install_identifier_lockfile_file:
+        install_identifier = install_identifier_lockfile_file.read().strip()
+    return install_identifier
+
+
+BUILD_IDENTIFIER_LOCKFILE_NAME = 'build_identifier.lock'
+
+
+@normpath
+def get_build_identifier_lockfile_path(dependency_base_dir):
+    """
+    Returns file path where the fully translated identifier for the installed
+    dependency is stored.
+    """
+    build_identifier_lockfile_path = os.path.join(
+        dependency_base_dir,
+        BUILD_IDENTIFIER_LOCKFILE_NAME,
+    )
+    return build_identifier_lockfile_path
+
+
+def get_build_identifier(dependency_base_dir):
+    """
+    Gets the build_identifier from the translated identifier lockfile
+    within a dependency's base dir.
+    """
+    build_identifier_lockfile_path = get_build_identifier_lockfile_path(dependency_base_dir)
+    with open(build_identifier_lockfile_path) as build_identifier_lockfile_file:
+        build_identifier = build_identifier_lockfile_file.read().strip()
+    return build_identifier
+
+
+RELEASE_LOCKFILE_BUILD_FILENAME = '{version}.json'
+
+
+@normpath
+def get_lockfile_build_path(build_asset_dir, version_string):
+    """
+    Returns the file path where the release lockfile for the current project
+    at the given version should be written.
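# A minimal usage sketch of the path helpers defined here, assuming the
# populus.utils.dependencies module from this change is importable; the
# project directory used below is hypothetical.
from populus.utils.dependencies import (
    get_dependency_base_dir,
    get_installed_packages_dir,
    get_release_lockfile_path,
)

installed_packages_dir = get_installed_packages_dir('/tmp/my-project')
# -> '/tmp/my-project/installed_packages'
owned_base_dir = get_dependency_base_dir(installed_packages_dir, 'owned')
# -> '/tmp/my-project/installed_packages/owned'
release_lockfile_path = get_release_lockfile_path(owned_base_dir)
# -> '/tmp/my-project/installed_packages/owned/lock.json'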
+ """ + filename = RELEASE_LOCKFILE_BUILD_FILENAME.format(version=version_string) + release_lockfile_build_path = os.path.join(build_asset_dir, filename) + return release_lockfile_build_path + + +@to_ordered_dict +def get_installed_dependency_locations(installed_packages_dir): + if os.path.exists(installed_packages_dir): + for maybe_package_dir in os.listdir(installed_packages_dir): + dependency_base_dir = get_dependency_base_dir( + installed_packages_dir, + maybe_package_dir, + ) + if is_dependency_base_dir(dependency_base_dir): + yield ( + extract_dependency_name_from_base_dir(dependency_base_dir), + dependency_base_dir, + ) + + +@to_tuple +def recursive_find_installed_dependency_base_dirs(installed_packages_dir): + """ + Return a tuple of all filesystem paths directly under the given + `installed_packages_dir` that look like dependency base dirs including all + sub dependencies. + """ + installed_dependency_locations = get_installed_dependency_locations(installed_packages_dir) + + for package_base_dir in installed_dependency_locations.values(): + yield package_base_dir + + package_installed_packages_dir = get_installed_packages_dir(package_base_dir) + + sub_base_dirs = recursive_find_installed_dependency_base_dirs( + package_installed_packages_dir, + ) + for sub_package_base_dir in sub_base_dirs: + yield sub_package_base_dir + + +@to_dict +def build_dependency_namespace_lookups(dependency_base_dirs): + for base_dir in dependency_base_dirs: + dependency_name = extract_dependency_name_from_base_dir(base_dir) + parent_dependency_names = tuple(sorted( + extract_dependency_name_from_base_dir(parent_base_dir) + for parent_base_dir + in dependency_base_dirs + if is_under_path(parent_base_dir, base_dir) + )) + dependency_namespace = ':'.join(itertools.chain( + parent_dependency_names, + (dependency_name,), + )) + yield (base_dir, dependency_namespace) + + +@to_dict +def extract_build_dependendencies_from_installed_packages(installed_packages_dir): + """ + Extract the current installed dependencies for creation of the + `build_dependencies` section of a release lockfile. 
+ """ + installed_dependency_locations = get_installed_dependency_locations(installed_packages_dir) + for dependency_name, dependency_base_dir in installed_dependency_locations.items(): + dependency_name = extract_dependency_name_from_base_dir(dependency_base_dir) + dependency_identifier = get_build_identifier(dependency_base_dir) + yield dependency_name, dependency_identifier diff --git a/populus/utils/exception/__init__.py b/populus/utils/exception/__init__.py new file mode 100644 index 00000000..454a0ce2 --- /dev/null +++ b/populus/utils/exception/__init__.py @@ -0,0 +1,10 @@ +from __future__ import absolute_import + +import sys + + +# raise MyException() from original_exception compatibility +if sys.version_info.major == 2: + from .exception_py2 import raise_from # noqa: F401 +else: + from .exception_py3 import raise_from # noqa: F401 diff --git a/populus/utils/exception/exception_py2.py b/populus/utils/exception/exception_py2.py new file mode 100644 index 00000000..e0a5cda9 --- /dev/null +++ b/populus/utils/exception/exception_py2.py @@ -0,0 +1,5 @@ +import sys + + +def raise_from(my_exception, other_exception): + raise my_exception, None, sys.exc_info()[2] # noqa: W602, E999 diff --git a/populus/utils/exception/exception_py3.py b/populus/utils/exception/exception_py3.py new file mode 100644 index 00000000..cedbed34 --- /dev/null +++ b/populus/utils/exception/exception_py3.py @@ -0,0 +1,2 @@ +def raise_from(my_exception, other_exception): + raise my_exception from other_exception diff --git a/populus/utils/functional.py b/populus/utils/functional.py index da3b01c5..65ece2f4 100644 --- a/populus/utils/functional.py +++ b/populus/utils/functional.py @@ -2,6 +2,11 @@ import functools import itertools +from eth_utils import ( + to_tuple, + compose, +) + from .string import ( normalize_class_name, ) @@ -46,6 +51,13 @@ def inner(*args, **kwargs): return outer +def star_apply(fn): + @functools.wraps(fn) + def inner(args): + return fn(*args) + return inner + + def apply_to_return_value(callback): def outer(fn): @functools.wraps(fn) @@ -57,6 +69,7 @@ def inner(*args, **kwargs): chain_return = apply_to_return_value(itertools.chain.from_iterable) +star_zip_return = compose(apply_to_return_value(star_apply(zip)), to_tuple) to_set = apply_to_return_value(set) diff --git a/populus/utils/ipfs.py b/populus/utils/ipfs.py new file mode 100644 index 00000000..de9631f8 --- /dev/null +++ b/populus/utils/ipfs.py @@ -0,0 +1,153 @@ +import operator +import hashlib + +import ipfsapi + +from eth_utils import ( + to_dict, + compose, +) + +from populus.pb.ipfs_file_pb2 import ( + Data, + PBNode, +) + +from .six import ( + parse, +) +from .base58 import ( + b58encode, +) + + +def create_ipfs_uri(ipfs_hash): + return "ipfs://{0}".format(ipfs_hash) + + +def is_ipfs_uri(value): + parse_result = parse.urlparse(value) + if parse_result.scheme != 'ipfs': + return False + if not parse_result.netloc and not parse_result.path: + return False + + return True + + +def extract_ipfs_path_from_uri(value): + parse_result = parse.urlparse(value) + + if parse_result.netloc: + if parse_result.path: + return ''.join((parse_result.netloc, parse_result.path)) + else: + return parse_result.netloc + else: + return parse_result.path.lstrip('/') + + +def resolve_ipfs_path_to_hash(ipfs_client, ipfs_path): + result = ipfs_client.resolve(ipfs_path) + resolved_path = result['Path'] + _, _, ipfs_hash = resolved_path.rpartition('/') + return ipfs_hash + + +def get_ipfs_object_type(ipfs_client, ipfs_path): + object_metadata = 
ipfs_client.file_ls(ipfs_path) + + if not object_metadata: + return False + + resolved_hash = object_metadata['Arguments'][ipfs_path] + + type_getter = compose( + operator.itemgetter('Objects'), + operator.itemgetter(resolved_hash), + operator.itemgetter('Type'), + ) + + return type_getter(object_metadata) + + +def is_directory(ipfs_client, ipfs_path): + return get_ipfs_object_type(ipfs_client, ipfs_path) == 'Directory' + + +def is_file(ipfs_client, ipfs_path): + return get_ipfs_object_type(ipfs_client, ipfs_path) == 'File' + + +@to_dict +def walk_ipfs_tree(ipfs_client, ipfs_path, prefix='./'): + """ + Given an IPFS hash or path, this walks down the filesystem tree and returns + a generator of 2-tuples where the first item is the filesystem path and the + second value is the ipfs hash of the file that belongs at that hash + """ + try: + ipfs_hash = resolve_ipfs_path_to_hash(ipfs_client, ipfs_path) + except ipfsapi.exceptions.StatusError as err: + raise type(err)( + "Unable to resolve IPFS Path:\n" + "- {0} -> {1}\n" + "{2}".format( + prefix, + ipfs_path, + str(err), + ) + ) + + if is_file(ipfs_client, ipfs_hash): + yield (prefix, ipfs_hash) + elif is_directory(ipfs_client, ipfs_hash): + links = ipfs_client.file_ls(ipfs_hash)['Objects'][ipfs_hash]['Links'] + + for link in links: + link_hash = link['Hash'] + link_name = link['Name'] + + if is_file(ipfs_client, link_hash): + sub_prefix = '{prefix}{name}'.format(prefix=prefix, name=link_name) + yield (sub_prefix, link_hash) + elif is_directory(ipfs_client, link_hash): + sub_prefix = '{prefix}{name}/'.format(prefix=prefix, name=link_name) + for value in walk_ipfs_tree(ipfs_client, link_hash, sub_prefix): + yield value + else: + raise ValueError("Unsupported type. Must be an IPFS file or directory") + + +SHA2_256 = b'\x12' +LENGTH_32 = b'\x20' + + +def multihash(value): + data_hash = hashlib.sha256(value).digest() + + multihash_bytes = SHA2_256 + LENGTH_32 + data_hash + return multihash_bytes + + +def serialize_file(file_path): + file_data = open(file_path, 'rb').read() + file_size = len(file_data) + + data_protobuf = Data( + Type=Data.DataType.Value('File'), + Data=file_data, + filesize=file_size, + ) + data_protobuf_bytes = data_protobuf.SerializeToString() + + file_protobuf = PBNode(Links=[], Data=data_protobuf_bytes) + + return file_protobuf + + +def generate_file_hash(file_path): + file_protobuf = serialize_file(file_path) + file_protobuf_bytes = file_protobuf.SerializeToString() + file_multihash = multihash(file_protobuf_bytes) + return b58encode(file_multihash) diff --git a/populus/utils/packaging.py b/populus/utils/packaging.py new file mode 100644 index 00000000..21077b46 --- /dev/null +++ b/populus/utils/packaging.py @@ -0,0 +1,670 @@ +import os +import hashlib +import functools +import operator +import re +import json + +import semver + +import jsonschema + +from eth_utils import ( + force_bytes, + to_tuple, + to_dict, + to_ordered_dict, +) + +from populus import ASSETS_DIR + +from populus.packages.exceptions import ( + LockfileResolutionError, +) + +from .exception import ( + raise_from, +) +from .filesystem import ( + is_same_path, + normpath, +) +from .ipfs import ( + is_ipfs_uri, +) + + +SUPPORTED_PACKAGE_MANIFEST_VERSIONS = {'1'} + + +PACKAGE_MANIFEST_FILENAME = './ethpm.json' + + +@normpath +def get_project_package_manifest_path(project_dir): + """ + Returns filesystem path for the project's package manifest file (ethpm.json) + """ + return os.path.join(project_dir, PACKAGE_MANIFEST_FILENAME) + + +PACKAGE_NAME_REGEX = 
'[a-z][-a-z0-9]{0,213}'
+
+EXACT_PACKAGE_NAME_REGEX = (
+    "^"
+    "{package_name_regex}"
+    "$"
+).format(
+    package_name_regex=PACKAGE_NAME_REGEX,
+)
+
+
+def is_package_name(value):
+    """
+    Returns boolean whether the value is a valid package name.
+    """
+    return bool(re.match(EXACT_PACKAGE_NAME_REGEX, value))
+
+
+def is_aliased_package_name(value):
+    """
+    Returns boolean whether the value is a valid aliased package name.
+    """
+    alias, _, package_name = value.partition(':')
+    return is_package_name(alias) and is_package_name(package_name)
+
+
+IDENTIFIER_VERSION_SPECIFIERS = (
+    "==",
+    ">=",
+    ">",
+    "<=",
+    "<",
+)
+
+IDENTIFIER_VERSION_COMPARISON_REGEX = "|".join(IDENTIFIER_VERSION_SPECIFIERS)
+
+VERSION_NUMBER_PART_REGEX = "(?:0|[1-9][0-9]*)"
+PRERELEASE_REGEX = (
+    "(?:0|[1-9A-Za-z-][0-9A-Za-z-]*)"
+    "(\.(?:0|[1-9A-Za-z-][0-9A-Za-z-]*))*"
+)
+BUILD_REGEX = (
+    "[0-9A-Za-z-]+"
+    "(\.[0-9A-Za-z-]+)*"
+)
+
+VERSON_NUMBER_REGEX = (
+    "(?P<major>{version_number_part_regex})"
+    "\."
+    "(?P<minor>{version_number_part_regex})"
+    "\."
+    "(?P<patch>{version_number_part_regex})"
+    "(\-(?P<prerelease>{prerelease_regex}))?"
+    "(\+(?P<build>{build_regex}))?"
+).format(
+    version_number_part_regex=VERSION_NUMBER_PART_REGEX,
+    prerelease_regex=PRERELEASE_REGEX,
+    build_regex=BUILD_REGEX,
+)
+
+PACKAGE_IDENTIFIER_REGEX = (
+    "(?P<package_name>{package_name_regex})"
+    "((?P<version_comparison>{version_comparison_regex})(?P<version>{version_number_regex}))?"
+).format(
+    package_name_regex=PACKAGE_NAME_REGEX,
+    version_comparison_regex=IDENTIFIER_VERSION_COMPARISON_REGEX,
+    version_number_regex=VERSON_NUMBER_REGEX,
+)
+
+
+EXACT_PACKAGE_IDENTIFIER_REGEX = (
+    "^"
+    "{package_identifier_regex}"
+    "$"
+).format(
+    package_identifier_regex=PACKAGE_IDENTIFIER_REGEX
+)
+
+
+def is_direct_package_identifier(value):
+    """
+    Returns boolean whether the value is a non-aliased package identifier which
+    declares a package_name and possibly a version specifier.
+    """
+    return bool(re.match(EXACT_PACKAGE_IDENTIFIER_REGEX, value))
+
+
+ALIASED_PACKAGE_IDENTIFIER_REGEX = (
+    "^"
+    "{package_name_regex}"
+    "\:"
+    "{package_identifier_regex}"
+    "$"
+).format(
+    package_name_regex=PACKAGE_NAME_REGEX,
+    package_identifier_regex=PACKAGE_IDENTIFIER_REGEX
+)
+
+
+def is_aliased_package_identifier(value):
+    """
+    Returns boolean whether the value is a package identifier which has an alias.
+    """
+    return bool(re.match(ALIASED_PACKAGE_IDENTIFIER_REGEX, value))
+
+
+DEPENDENCY_VERSION_SPECIFIERS = (
+    ">=",
+    ">",
+    "<=",
+    "<",
+)
+
+DEPENDENCY_VERSION_COMPARISON_REGEX = "|".join(DEPENDENCY_VERSION_SPECIFIERS)
+
+DEPENDENCY_VERSION_REGEX = (
+    "^"
+    "(?P<version_comparison>{version_comparison_regex})?(?P<version>{version_number_regex})"
+    "$"
+).format(
+    version_comparison_regex=DEPENDENCY_VERSION_COMPARISON_REGEX,
+    version_number_regex=VERSON_NUMBER_REGEX,
+)
+
+
+def is_version_specifier(value):
+    """
+    Returns boolean whether the value is a version number or version number
+    range.
+    """
+    return bool(re.match(DEPENDENCY_VERSION_REGEX, value))
+
+
+EXACT_VERSION_REGEX = (
+    "^"
+    "(?P<version>{version_number_regex})"
+    "$"
+).format(
+    version_number_regex=VERSON_NUMBER_REGEX,
+)
+
+
+def is_exact_version(value):
+    """
+    Returns boolean whether the value is an exact version number.
+    """
+    return bool(re.match(EXACT_VERSION_REGEX, value))
+
+
+def is_local_project_package_identifier(project_dir, package_identifier):
+    """
+    Returns boolean whether the value is the filesystem path to this project
+    directory.
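# A few inputs and the predicate each one is expected to satisfy, assuming the
# regexes defined here (package names are lower case with hyphens; identifiers
# may carry a version specifier or an alias):
from populus.utils.packaging import (
    is_aliased_package_identifier,
    is_direct_package_identifier,
    is_exact_version,
    is_package_name,
)

assert is_package_name('safe-math-lib')
assert not is_package_name('SafeMathLib')  # upper case is not a package name
assert is_direct_package_identifier('owned>=1.0.0')
assert is_aliased_package_identifier('my-owned:owned==1.0.0')
assert is_exact_version('1.0.0')
assert not is_exact_version('>=1.0.0')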
+ """ + if not os.path.exists(package_identifier): + return False + return is_same_path(package_identifier, project_dir) + + +def is_aliased_ipfs_uri(value): + """ + Returns boolean whether the value is an IPFS URI with an alias. + """ + dependency_name, _, maybe_ipfs_uri = value.partition('@') + return all(( + is_package_name(dependency_name), + is_ipfs_uri(maybe_ipfs_uri) + )) + + +def is_filesystem_release_lockfile_path(package_identifier): + """ + Returns boolean whether the value a filesystem path to a release lockfile. + """ + if not os.path.exists(package_identifier): + return False + elif not os.path.isfile(package_identifier): + return False + + try: + load_release_lockfile(package_identifier) + except json.JSONDecodeError: + return False + + return True + + +def is_aliased_filesystem_release_lockfile_path(package_identifier): + """ + Returns boolean whether the value a filesystem path to a release lockfile + with an alias. + """ + dependency_name, _, maybe_release_lockfile_path = package_identifier.partition('@') + return all(( + is_package_name(dependency_name), + is_filesystem_release_lockfile_path(maybe_release_lockfile_path), + )) + + +def parse_package_identifier(value): + """ + Parse a package identifier returning the package name, the type of version + comparison and the version number for that comparison. Both + version_comparison and version may be `None` + """ + if is_aliased_package_identifier(value): + _, _, value = value.partition(':') + + match = re.match(PACKAGE_IDENTIFIER_REGEX, value) + if match is None: + raise ValueError("Unsupported package identifier format: {0}".format(value)) + parts = match.groupdict() + return parts['package_name'], parts['version_comparison'], parts['version'] + + +def construct_package_identifier(dependency_name, dependency_identifier): + """ + Construct a package identifier string from a dependency name and the + associated identifier. + """ + if is_direct_package_identifier(dependency_identifier): + return "{dependency_name}:{package_identifier}".format( + dependency_name=dependency_name, + package_identifier=dependency_identifier, + ) + elif is_ipfs_uri(dependency_identifier): + return "{dependency_name}@{ipfs_uri}".format( + dependency_name=dependency_name, + ipfs_uri=dependency_identifier, + ) + elif is_version_specifier(dependency_identifier): + if is_exact_version(dependency_identifier): + return "{dependency_name}=={version}".format( + dependency_name=dependency_name, + version=dependency_identifier, + ) + else: + return "{dependency_name}{version}".format( + dependency_name=dependency_name, + version=dependency_identifier, + ) + else: + raise ValueError("Unsupported Identifier: '{0}'".format(dependency_identifier)) + + +def construct_dependency_identifier(dependency_name, install_identifier, build_identifier): + if is_direct_package_identifier(install_identifier): + package_name, comparison, version = parse_package_identifier(install_identifier) + if package_name == dependency_name: + if comparison == '==': + return version + else: + return ''.join((comparison, version)) + else: + return install_identifier + elif is_ipfs_uri(install_identifier): + return install_identifier + else: + raise ValueError("Unsupported root identifier: {0}".format(install_identifier)) + + +def extract_dependency_name_from_identifier_lineage(package_identifier_lineage, + release_lockfile): + """ + Extracts and returns the `dependency_name` from the list of translated + identifier names. 
+ """ + for package_identifier in package_identifier_lineage: + if is_aliased_package_identifier(package_identifier): + dependency_name, _, _ = package_identifier.partition(':') + return dependency_name + elif is_aliased_ipfs_uri(package_identifier): + dependency_name, _, _ = package_identifier.partition('@') + return dependency_name + elif is_aliased_filesystem_release_lockfile_path(package_identifier): + dependency_name, _, _ = package_identifier.partition('@') + return dependency_name + elif is_package_name(package_identifier): + return package_identifier + elif is_direct_package_identifier(package_identifier): + package_name, _, _ = parse_package_identifier(package_identifier) + return package_name + return release_lockfile['package_name'] + + +def validate_package_manifest(package_manifest): + """ + Validate a package manifest against the expected schema. + """ + # TODO: implement jsonschema validation + pass + + +RELEASE_LOCKFILE_SCHEMA_FILENAME = 'release-lockfile-v1.schema.json' + + +def load_release_lockfile_schema(): + schema_path = os.path.join(ASSETS_DIR, RELEASE_LOCKFILE_SCHEMA_FILENAME) + with open(schema_path) as schema_file: + schema = json.load(schema_file) + return schema + + +def validate_release_lockfile(release_lockfile): + """ + Validate a release lockfile against the expected schema. + + TODO: additional validation that isn't covered by the JSON-schema + - valid relative file paths + - referenced package names + - whatever else... + """ + # dump and then reload the lockfile to coerce any tuples into lists. + # otherwise jsonschema gets mad because it won't accept a tuple in place of + # an array. + release_lockfile_for_validation = json.loads(json.dumps(release_lockfile)) + release_lockfile_schema = load_release_lockfile_schema() + jsonschema.validate(release_lockfile_for_validation, release_lockfile_schema) + + +def load_release_lockfile(release_lockfile_path, validate=True): + with open(release_lockfile_path) as release_lockfile_file: + release_lockfile = json.load(release_lockfile_file) + + if validate: + validate_release_lockfile(release_lockfile) + return release_lockfile + + +def write_release_lockfile(release_lockfile, release_lockfile_path): + with open(release_lockfile_path, 'w') as release_lockfile_file: + json.dump(release_lockfile, release_lockfile_file, sort_keys=True, indent=2) + + +def extract_install_identifier(package_identifier_lineage): + """ + Returns the root identifier from the translated lineage of the package + identifier. + """ + for identifier in package_identifier_lineage: + if is_package_name(identifier): + continue + elif is_direct_package_identifier(identifier): + return identifier + elif is_ipfs_uri(identifier): + return identifier + else: + raise ValueError("No valid install identifiers found in package identifier lineage") + + +def extract_package_metadata(package_identifier_lineage, + release_lockfile): + """ + Construct the installation metadata. + """ + return { + 'version': release_lockfile['version'], + 'package_name': release_lockfile['package_name'], + 'dependency_name': extract_dependency_name_from_identifier_lineage( + package_identifier_lineage, + release_lockfile, + ), + 'install_identifier': extract_install_identifier(package_identifier_lineage), + 'build_identifier': package_identifier_lineage[-1], + } + + +def translate_package_identifier(package_identifier, package_backends): + """ + Find the first backend which can translate the given `package_identifier` + and return the translated package identifier. 
+ """ + for backend in package_backends.values(): + if backend.can_translate_package_identifier(package_identifier): + return backend.translate_package_identifier(package_identifier) + else: + raise ValueError( + "No package backends are able to translate the identifier: " + "{0}".format(package_identifier) + ) + + +def fingerprint_identifier(package_identifier): + """ + Construct a simple hash of the package identifier. This is used to detect + recursive circular translation loops. + """ + return hashlib.md5(force_bytes(package_identifier)).hexdigest() + + +@to_dict +def compute_identifier_tree(identifier_set, package_backends, seen_fingerprints=None): + """ + Compute the directed acyclic graph of the package identifiers. All leaf + nodes are package identifiers which can be resolved to their release + lockfiles. + """ + if seen_fingerprints is None: + seen_fingerprints = set() + + for package_identifier in identifier_set: + is_resolvable = any( + backend.can_resolve_to_release_lockfile(package_identifier) + for backend + in package_backends.values() + ) + is_translatable = any( + backend.can_translate_package_identifier(package_identifier) + for backend + in package_backends.values() + ) + + if is_resolvable: + yield package_identifier, None + elif is_translatable: + fingerprint = fingerprint_identifier(package_identifier) + if fingerprint in seen_fingerprints: + raise ValueError("Translation error. Non-acyclic tranlation graph detected") + + translated_package_identifiers = translate_package_identifier( + package_identifier, + package_backends, + ) + + yield ( + package_identifier, + compute_identifier_tree( + translated_package_identifiers, + package_backends, + seen_fingerprints={fingerprint} | seen_fingerprints, + ), + ) + else: + raise ValueError( + "Untranslatable and Unresolvable identifier: {0}".format(package_identifier) + ) + + +@to_tuple +def flatten_identifier_tree(identifier_tree): + """ + Takes the identifier tree produced by `compute_identifier_tree` + and flattens it so that there is one entry for each leaf node. + """ + for key, value in identifier_tree.items(): + if value is None: + yield (key,) + else: + for sub_value in flatten_identifier_tree(value): + yield (key,) + sub_value + + +def resolve_to_release_lockfile(package_identifier, package_backends): + """ + Find the first backend which can resolve the package identifier to the + release lockfile and return the resolved release lockfile. + """ + for _, backend in package_backends.items(): + if backend.can_resolve_to_release_lockfile(package_identifier): + return backend.resolve_to_release_lockfile(package_identifier) + else: + raise ValueError( + "None of the configured package backends support resolving the " + "identifier '{0}'".format(package_identifier) + ) + + +def resolve_package_source_tree(release_lockfile, package_backends): + """ + Find the first backend which can resolve package source tree for te geven + release lockfile and return the resolved package source tree. 
+ """ + for _, backend in package_backends.items(): + if backend.can_resolve_package_source_tree(release_lockfile): + return backend.resolve_package_source_tree(release_lockfile) + else: + continue + else: + if not release_lockfile.get('sources'): + return {} + raise ValueError( + "None of the configured package backends could resolve the source tree for" + "'{0}'".format(release_lockfile) + ) + + +def persist_package_file(file_path, package_backends): + """ + Find the first backend which is capable of persisting the given file path + and persist the given file. + """ + for backend in package_backends.values(): + if backend.can_persist_package_file(file_path): + return backend.persist_package_file(file_path) + else: + raise ValueError( + "None of the configured package backends could persist '{0}'".format( + file_path, + ) + ) + + +@to_ordered_dict +def get_publishable_backends(release_lockfile, release_lockfile_uri, package_backends): + """ + Return the package backends which are capable of publishing the given + release lockfile and corresponding URI. + """ + for backend_name, backend in package_backends.items(): + if backend.can_publish_release_lockfile(release_lockfile, release_lockfile_uri): + yield backend_name, backend + + +def recursively_resolve_package_data(package_identifier_lineage, package_backends): + """ + Given a fully translated package identifier lineage, resolve all release + lockfiles and underlying package source trees. + """ + try: + release_lockfile = resolve_to_release_lockfile( + package_identifier_lineage[-1], + package_backends, + ) + except LockfileResolutionError as err: + raise_from( + LockfileResolutionError( + "Error resolving lockfile for {0}:\n" + "{1}".format( + " > ".join(package_identifier_lineage), + str(err), + ) + ), + err, + ) + + package_source_tree = resolve_package_source_tree(release_lockfile, package_backends) + + # Validate + validate_release_lockfile(release_lockfile) + + # Compute package metadata + package_meta = extract_package_metadata( + package_identifier_lineage, + release_lockfile, + ) + + package_build_dependencies = release_lockfile.get('build_dependencies', {}) + + dependency_identifiers = tuple( + construct_package_identifier(dependency_name, dependency_identifier) + for dependency_name, dependency_identifier + in package_build_dependencies.items() + ) + dependency_identifier_tree = compute_identifier_tree( + dependency_identifiers, + package_backends, + ) + flattened_dependency_identifier_tree = flatten_identifier_tree( + dependency_identifier_tree, + ) + + resolved_dependencies = tuple( + recursively_resolve_package_data( + dependency_identifier_lineage, + package_backends, + ) for dependency_identifier_lineage in flattened_dependency_identifier_tree + ) + + return { + 'meta': package_meta, + 'lockfile': release_lockfile, + 'source_tree': package_source_tree, + 'dependencies': resolved_dependencies, + } + + +def is_version_match(version, comparison, version_target): + """ + Return boolean whether the `version` matches the `version_target` for the + given string representation of the comparison. 
+ """ + if comparison == '==': + comparison_fn = operator.eq + elif comparison == '>': + comparison_fn = operator.gt + elif comparison == '>=': + comparison_fn = operator.ge + elif comparison == '<': + comparison_fn = operator.lt + elif comparison == '<=': + comparison_fn = operator.le + else: + raise ValueError("Unsupported comparison") + + version_info = semver.parse_version_info(version) + version_target_info = semver.parse_version_info(version_target) + + return comparison_fn(version_info, version_target_info) + + +def filter_versions(comparison, version_target, all_versions): + """ + Return the version strings from `all_versions` which match `version_target` + for the given comparison. + """ + return { + version + for version + in all_versions + if is_version_match(version, comparison, version_target) + } + + +def get_max_version(all_versions): + """ + Return the largest version from the given versions. + """ + if not all_versions: + raise ValueError('Must pass in at least 1 version string.') + return functools.reduce(semver.max_ver, all_versions) diff --git a/populus/utils/testing.py b/populus/utils/testing.py index 077739e0..136abe5b 100644 --- a/populus/utils/testing.py +++ b/populus/utils/testing.py @@ -42,6 +42,15 @@ def outer(fn): return outer +def load_example_package(example_package_name): + def outer(fn): + if not hasattr(fn, '_populus_packages_to_load'): + fn._populus_packages_to_load = [] + fn._populus_packages_to_load.append(example_package_name) + return fn + return outer + + DEFAULT_TESTS_DIR = "./tests/" diff --git a/setup.py b/setup.py index 95c9fdce..78bff829 100644 --- a/setup.py +++ b/setup.py @@ -13,7 +13,7 @@ setup( name='populus', - version="1.8.1", + version="2.0.0-alpha.4", description="""Ethereum Development Framework""", long_description=readme, author='Piper Merriam', @@ -27,13 +27,16 @@ "contextlib2>=0.5.4", "eth-testrpc>=1.3.0", "ethereum-utils>=0.2.0", + "ipfsapi>=0.4.0", "jsonschema>=2.5.1", + "protobuf>=3.0.0", "py-geth>=1.9.0", "py-solc>=1.2.0", "pylru>=1.0.9", "pysha3>=0.3,!=1.0,>1.0.0", "pytest>=2.7.2", "semantic_version>=2.6.0", + "semver>=2.7.2", # TODO: remove this dependency from 2.0 code "toolz>=0.8.2", "toposort>=1.4", "watchdog>=0.8.3", diff --git a/tests/cli/test_deploy_cmd.py b/tests/cli/test_deploy_cmd.py index 481da8fa..a19119dc 100644 --- a/tests/cli/test_deploy_cmd.py +++ b/tests/cli/test_deploy_cmd.py @@ -1,10 +1,7 @@ -import pytest - import os import re import click - from click.testing import CliRunner from populus.cli import main diff --git a/tests/compilation/test_solc_standard_json_backend.py b/tests/compilation/test_solc_standard_json_backend.py index c4fbcb92..26a79470 100644 --- a/tests/compilation/test_solc_standard_json_backend.py +++ b/tests/compilation/test_solc_standard_json_backend.py @@ -105,3 +105,65 @@ def test_compiling_example_greeter_contract(project): _, compiled_contracts = compile_project_contracts(project) assert 'Greeter' in compiled_contracts + + +@pytest.mark.skipif( + not solc_supports_standard_json_interface(), + reason="Solc compiler does not support standard json compilation", +) +@load_example_package('owned') +def test_compiling_with_single_installed_package(project): + source_paths, contract_data = compile_project_contracts(project) + + assert 'owned' in contract_data + + +@pytest.mark.skipif( + not solc_supports_standard_json_interface(), + reason="Solc compiler does not support standard json compilation", +) +@load_example_package('owned') +@load_example_package('standard-token') +def 
test_compiling_with_multiple_installed_packages(project): + source_paths, contract_data = compile_project_contracts(project) + + assert 'owned' in contract_data + assert 'Token' in contract_data + assert 'StandardToken' in contract_data + + +@pytest.mark.skipif( + not solc_supports_standard_json_interface(), + reason="Solc compiler does not support standard json compilation", +) +@load_example_package('transferable') +def test_compiling_with_nested_installed_packages(project): + source_paths, contract_data = compile_project_contracts(project) + + assert 'owned' in contract_data + assert 'transferable' in contract_data + + +@pytest.mark.skipif( + not solc_supports_standard_json_interface(), + reason="Solc compiler does not support standard json compilation", +) +@load_example_package('transferable') +def test_compiling_with_nested_installed_packages(project): + source_paths, contract_data = compile_project_contracts(project) + + assert 'owned' in contract_data + assert 'transferable' in contract_data + + +@pytest.mark.skipif( + not solc_supports_standard_json_interface(), + reason="Solc compiler does not support standard json compilation", +) +@load_example_package('owned') +@load_test_contract_fixture('UsesOwned.sol') +def test_compiling_with_import_from_package(project): + source_paths, contract_data = compile_project_contracts(project) + + assert 'UsesOwned' in contract_data + assert 'owned' in contract_data diff --git a/tests/contract-provider/test_get_contract_factory.py b/tests/contract-provider/test_get_contract_factory.py index c4b59d83..9b3d1ab4 100644 --- a/tests/contract-provider/test_get_contract_factory.py +++ b/tests/contract-provider/test_get_contract_factory.py @@ -1,4 +1,3 @@ -import os import pytest from populus.contracts.exceptions import ( @@ -9,6 +8,9 @@ from populus.utils.testing import ( link_bytecode_by_name, ) +from populus.utils.testing import ( + load_example_package, +) def test_get_contract_factory_with_no_dependencies(chain): @@ -64,3 +66,15 @@ def test_get_contract_factory_with_dependency_bytecode_mismatch(chain, with pytest.raises(BytecodeMismatch): provider.get_contract_factory('Multiply13') + + +@pytest.mark.parametrize( + 'contract_type_name', + ('owned', 'owned:owned'), +) +@load_example_package('owned') +def test_get_contract_factory_from_installed_dependency(chain, contract_type_name): + provider = chain.provider + + Owned = provider.get_contract_factory(contract_type_name) + assert Owned.bytecode == chain.project.compiled_contract_data['owned']['bytecode'] diff --git a/tests/contract-provider/test_installed_packages_backend.py b/tests/contract-provider/test_installed_packages_backend.py new file mode 100644 index 00000000..928b14d0 --- /dev/null +++ b/tests/contract-provider/test_installed_packages_backend.py @@ -0,0 +1,139 @@ +import pytest + +import os +import json + +from populus import Project +from populus.utils.config import ( + get_json_config_file_path, +) +from populus.utils.chains import ( + get_chain_definition, +) +from populus.utils.dependencies import ( + get_release_lockfile_path, +) +from populus.utils.packaging import ( + extract_package_metadata, + load_release_lockfile, + write_release_lockfile, +) +from populus.packages.installation import ( + write_installed_packages, +) + + +EXAMPLE_PACKAGES_BASE_PATH = './tests/example-packages' + + +@pytest.fixture() +def with_installed_packages_backend(project_dir): + config_file_path = get_json_config_file_path(project_dir) + config = { + 'version': '1', + 'chains': { + 'tester': { + 'chain': 
{'class': 'populus.chain.tester.TesterChain'}, + 'web3': {'provider': {'class': 'web3.providers.tester.EthereumTesterProvider'}}, + 'contracts': { + 'backends': { + 'InstalledPackages': { + "class": "populus.contracts.backends.installed_packages.InstalledPackagesBackend", + "priority": 10 + } + } + } + } + } + } + with open(config_file_path, 'w') as config_file: + json.dump(config, config_file) + return config_file_path + + +@pytest.yield_fixture() +def test_chain(project): + project.config['chains.tester.contracts.backends'] = { + 'InstalledPackages': {'$ref': 'contracts.backends.InstalledPackages'}, + 'Memory': {'$ref': 'contracts.backends.Memory'}, + } + project.write_config() + project.load_config() + + with project.get_chain('tester') as chain: + yield chain + + +@pytest.fixture() +def installed_safe_math_lib_dependency(populus_source_root, + test_chain): + chain = test_chain + project = chain.project + assert 'InstalledPackages' in chain.provider.provider_backends + release_lockfile_path = os.path.join( + populus_source_root, + EXAMPLE_PACKAGES_BASE_PATH, + 'safe-math-lib', + '1.0.0.json', + ) + source_file_path = os.path.join( + populus_source_root, + EXAMPLE_PACKAGES_BASE_PATH, + 'safe-math-lib', + 'contracts', + 'SafeMathLib.sol', + ) + release_lockfile = load_release_lockfile(release_lockfile_path) + + with open(source_file_path) as source_file: + source_content = source_file.read() + + package_meta = extract_package_metadata( + ('ipfs://QmfUwis9K2SLwnUh62PDb929JzU5J2aFKd4kS1YErYajdq',), + release_lockfile, + ) + package_data = { + 'meta': package_meta, + 'lockfile': release_lockfile, + 'source_tree': {'./contracts/SafeMathLib.sol': source_content}, + 'dependencies': tuple(), + } + write_installed_packages(project.installed_packages_dir, [package_data]) + assert 'safe-math-lib' in project.installed_dependency_locations + project._cached_compiled_contracts = None + assert 'SafeMathLib' in project.compiled_contract_data + + +@pytest.fixture() +def deployed_safe_math_lib(test_chain, installed_safe_math_lib_dependency): + chain = test_chain + project = chain.project + provider = chain.provider + assert not provider.is_contract_available('SafeMathLib') + + release_lockfile_path = get_release_lockfile_path( + project.installed_dependency_locations['safe-math-lib'], + ) + + release_lockfile = load_release_lockfile(release_lockfile_path) + + chain_definition = get_chain_definition(chain.web3) + SafeMathLibFactory = provider.get_contract_factory('SafeMathLib') + deploy_txn = SafeMathLibFactory.deploy() + contract_address = chain.wait.for_contract_address(deploy_txn) + release_lockfile['deployments'].update({ + chain_definition: { + 'SafeMathLib': { + 'address': contract_address, + 'contract_type': 'SafeMathLib', + 'runtime_bytecode': SafeMathLibFactory.bytecode_runtime, + }, + } + }) + write_release_lockfile(release_lockfile, release_lockfile_path) + + +def test_getting_contract_address_from_installed_package(test_chain, + deployed_safe_math_lib): + chain = test_chain + assert chain.provider.is_contract_available('SafeMathLib') diff --git a/tests/contracts-utils/test_is_dependency_contract_name.py b/tests/contracts-utils/test_is_dependency_contract_name.py new file mode 100644 index 00000000..65ee54f2 --- /dev/null +++ b/tests/contracts-utils/test_is_dependency_contract_name.py @@ -0,0 +1,19 @@ +import pytest + +from populus.utils.contracts import ( + is_dependency_contract_name, +) + + +@pytest.mark.parametrize( + 'value,expected', + ( + ('owned', False), + ('owned:owned', True), + 
('Owned:owned', False), + ('owned:owned-with-hyphen', False), + ) +) +def testis_dependency_contract_name(value, expected): + actual = is_dependency_contract_name(value) + assert actual is expected diff --git a/tests/dependencies-utils/test_build_dependency_namespace_lookups.py b/tests/dependencies-utils/test_build_dependency_namespace_lookups.py new file mode 100644 index 00000000..7068f1f1 --- /dev/null +++ b/tests/dependencies-utils/test_build_dependency_namespace_lookups.py @@ -0,0 +1,62 @@ +import os + +from populus.utils.dependencies import ( + build_dependency_namespace_lookups, + recursive_find_installed_dependency_base_dirs, + get_installed_packages_dir, +) +from populus.utils.testing import ( + load_example_package, +) + + +def test_building_dependency_namespace_lookup_with_no_dependencies(): + namespace_lookups = build_dependency_namespace_lookups(tuple()) + assert not namespace_lookups + + +@load_example_package('owned') +@load_example_package('standard-token') +def test_building_dependency_namespace_lookup_with_flat_dependencies(project): + owned_base_dir = os.path.join(project.installed_packages_dir, 'owned') + standard_token_base_dir = os.path.join(project.installed_packages_dir, 'standard-token') + + dependency_base_dirs = recursive_find_installed_dependency_base_dirs( + project.installed_packages_dir, + ) + namespace_lookups = build_dependency_namespace_lookups(dependency_base_dirs) + + assert namespace_lookups[owned_base_dir] == 'owned' + assert namespace_lookups[standard_token_base_dir] == 'standard-token' + + +@load_example_package('owned') +@load_example_package('transferable') +@load_example_package('standard-token') +@load_example_package('piper-coin') +def test_building_dependency_namespace_lookup_with_nested_dependencies(project): + owned_base_dir = os.path.join(project.installed_packages_dir, 'owned') + transferable_base_dir = os.path.join(project.installed_packages_dir, 'transferable') + transferable_owned_base_dir = os.path.join( + get_installed_packages_dir(transferable_base_dir), + 'owned', + ) + standard_token_base_dir = os.path.join(project.installed_packages_dir, 'standard-token') + piper_coin_base_dir = os.path.join(project.installed_packages_dir, 'piper-coin') + piper_coin_standard_token_base_dir = os.path.join( + get_installed_packages_dir(piper_coin_base_dir), + 'standard-token', + ) + + dependency_base_dirs = recursive_find_installed_dependency_base_dirs( + project.installed_packages_dir, + ) + namespace_lookups = build_dependency_namespace_lookups(dependency_base_dirs) + + assert namespace_lookups[owned_base_dir] == 'owned' + assert namespace_lookups[transferable_base_dir] == 'transferable' + assert namespace_lookups[standard_token_base_dir] == 'standard-token' + assert namespace_lookups[piper_coin_base_dir] == 'piper-coin' + + assert namespace_lookups[transferable_owned_base_dir] == 'transferable:owned' + assert namespace_lookups[piper_coin_standard_token_base_dir] == 'piper-coin:standard-token' diff --git a/tests/dependencies-utils/test_recursive_find_installed_dependency_base_dirs.py b/tests/dependencies-utils/test_recursive_find_installed_dependency_base_dirs.py new file mode 100644 index 00000000..a6379710 --- /dev/null +++ b/tests/dependencies-utils/test_recursive_find_installed_dependency_base_dirs.py @@ -0,0 +1,35 @@ +import os + +from populus.utils.dependencies import ( + recursive_find_installed_dependency_base_dirs, + get_installed_packages_dir, +) +from populus.utils.testing import ( + load_example_package, +) + + +def 
test_with_no_packages(project): + base_dirs = recursive_find_installed_dependency_base_dirs(project.installed_packages_dir) + assert base_dirs == tuple() + + +@load_example_package('owned') +def test_with_single_package_installed(project): + assert 'owned' in project.installed_dependency_locations + base_dirs = recursive_find_installed_dependency_base_dirs(project.installed_packages_dir) + assert project.installed_dependency_locations['owned'] in base_dirs + + +@load_example_package('transferable') +def test_with_nested_package_installation(project): + assert 'transferable' in project.installed_dependency_locations + assert 'owned' not in project.installed_dependency_locations + base_dirs = recursive_find_installed_dependency_base_dirs(project.installed_packages_dir) + assert project.installed_dependency_locations['transferable'] in base_dirs + + owned_base_path = os.path.join( + get_installed_packages_dir(project.installed_dependency_locations['transferable']), + 'owned', + ) + assert owned_base_path in base_dirs diff --git a/tests/example-packages/escrow/1.0.0.json b/tests/example-packages/escrow/1.0.0.json new file mode 100644 index 00000000..d84d83ec --- /dev/null +++ b/tests/example-packages/escrow/1.0.0.json @@ -0,0 +1,127 @@ +{ + "lockfile_version": "1", + "version": "1.0.0", + "package_name": "escrow", + "sources": { + "./contracts/SafeSendLib.sol": "ipfs://QmcnzhWjaV71qzKntv4burxyix9W2yBA2LrJB4k99tGqkZ", + "./contracts/Escrow.sol": "ipfs://QmSwmFLT5B5aag485ZWvHmfdC1cU5EFdcqs1oqE5KsxGMw" + }, + "contract_types": { + "SafeSendLib": { + "bytecode": "0x606060405234610000575b60ad806100176000396000f36504062dabbdf050606060405260e060020a60003504639341231c81146024575b6000565b60306004356024356044565b604080519115158252519081900360200190f35b60003073ffffffffffffffffffffffffffffffffffffffff16318211156068576000565b60405173ffffffffffffffffffffffffffffffffffffffff84169083156108fc029084906000818181858888f19350505050151560a3576000565b5060015b9291505056", + "runtime_bytecode": "0x6504062dabbdf050606060405260e060020a60003504639341231c81146024575b6000565b60306004356024356044565b604080519115158252519081900360200190f35b60003073ffffffffffffffffffffffffffffffffffffffff16318211156068576000565b60405173ffffffffffffffffffffffffffffffffffffffff84169083156108fc029084906000818181858888f19350505050151560a3576000565b5060015b9291505056", + "abi": [ + { + "constant": false, + "inputs": [ + {"name": "recipient", "type": "address"}, + {"name": "value", "type": "uint256"} + ], + "name": "sendOrThrow", + "outputs": [ + {"name": "", "type": "bool"} + ], + "payable": false, + "type": "function" + } + ], + "compiler": { + "type": "solc", + "version": "0.4.6+commit.2dabbdf0.Darwin.appleclang", + "settings": { + "optimize": true + } + }, + "natspec": { + "author": "Piper Merriam ", + "methods": { + "sendOrThrow(address,uint256)": { + "details": "Attempts to send the specified amount to the recipient throwing an error if it fails", + "params": { + "recipient": "The address that the funds should be to.", + "value": "The amount in wei that should be sent." + } + } + }, + "title": "Library for safe sending of ether." 
+ } + }, + "Escrow": { + "runtime_bytecode": "0x606060405260e060020a600035046366d003ac811461003457806367e404ce1461005d57806369d8957514610086575b610000565b3461000057610041610095565b60408051600160a060020a039092168252519081900360200190f35b34610000576100416100a4565b60408051600160a060020a039092168252519081900360200190f35b34610000576100936100b3565b005b600154600160a060020a031681565b600054600160a060020a031681565b60005433600160a060020a039081169116141561014857600154604080516000602091820152815160e260020a6324d048c7028152600160a060020a03938416600482015230909316316024840152905173__SafeSendLib___________________________92639341231c926044808301939192829003018186803b156100005760325a03f41561000057506101e2915050565b60015433600160a060020a039081169116141561002f576000805460408051602090810193909352805160e260020a6324d048c7028152600160a060020a039283166004820152309092163160248301525173__SafeSendLib___________________________92639341231c9260448082019391829003018186803b156100005760325a03f41561000057506101e2915050565b610000565b5b5b56", + "abi": [ + { + "constant": true, + "inputs": [], + "name": "recipient", + "outputs": [ + {"name": "","type": "address"} + ], + "payable": false, + "type": "function" + }, + { + "constant": true, + "inputs": [], + "name": "sender", + "outputs": [ + {"name": "","type": "address"} + ], + "payable": false, + "type": "function" + }, + { + "constant": false, + "inputs": [], + "name": "releaseFunds", + "outputs": [], + "payable": false, + "type": "function" + }, + { + "inputs": [ + {"name": "_recipient","type": "address"} + ], + "payable": true, + "type": "constructor" + } + ], + "compiler": { + "type": "solc", + "version": "0.4.6+commit.2dabbdf0.Darwin.appleclang", + "settings": { + "optimize": true + } + }, + "natspec": { + "author": "Piper Merriam ", + "methods": { + "releaseFunds()": { + "details": "Releases the escrowed funds to the other party.", + "notice": "This will release the escrowed funds to the other party." + } + }, + "title": "Contract for holding funds in escrow between two semi trusted parties." + } + } + }, + "deployments": { + "blockchain://41941023680923e0fe4d74a34bdac8141f2540e3ae90623718e47d66d1ca4a2d/block/e76cf1f29a4689f836d941d7ffbad4e4b32035a441a509dc53150c2165f8e90d": { + "SafeMathLib": { + "contract_type": "SafeSendLib", + "address": "0x80d7f7a33e551455a909e1b914c4fd4e6d0074cc", + "transaction": "0x74561167f360eaa20ea67bd4b4bf99164aabb36b2287061e86137bfa0d35d5fb", + "block": "0x46554e3cf7b768b1cc1990ad4e2d3a137fe9373c0dda765f4db450cd5fa64102" + }, + "Escrow": { + "contract_type": "Escrow", + "address": "0x35b6b723786fd8bd955b70db794a1f1df56e852f", + "transaction": "0x905fbbeb6069d8b3c8067d233f58b0196b43da7a20b839f3da41f69c87da2037", + "block": "0x9b39dcab3d665a51755dedef56e7c858702f5817ce926a0cd8ff3081c5159b7f", + "link_dependencies": [ + {"offset": 524, "value": "SafeSendLib"}, + {"offset": 824, "value": "SafeSendLib"} + ] + } + } + } +} diff --git a/tests/example-packages/escrow/contracts/Escrow.sol b/tests/example-packages/escrow/contracts/Escrow.sol new file mode 100644 index 00000000..ad7b63de --- /dev/null +++ b/tests/example-packages/escrow/contracts/Escrow.sol @@ -0,0 +1,31 @@ +pragma solidity ^0.4.0; + + +import {SafeSendLib} from "./SafeSendLib.sol"; + + +/// @title Contract for holding funds in escrow between two semi trusted parties. 
+/// @author Piper Merriam +contract Escrow { + using SafeSendLib for address; + + address public sender; + address public recipient; + + function Escrow(address _recipient) payable { + sender = msg.sender; + recipient = _recipient; + } + + /// @dev Releases the escrowed funds to the other party. + /// @notice This will release the escrowed funds to the other party. + function releaseFunds() { + if (msg.sender == sender) { + recipient.sendOrThrow(this.balance); + } else if (msg.sender == recipient) { + sender.sendOrThrow(this.balance); + } else { + throw; + } + } +} diff --git a/tests/example-packages/escrow/contracts/SafeSendLib.sol b/tests/example-packages/escrow/contracts/SafeSendLib.sol new file mode 100644 index 00000000..5f586558 --- /dev/null +++ b/tests/example-packages/escrow/contracts/SafeSendLib.sol @@ -0,0 +1,17 @@ +pragma solidity ^0.4.0; + + +/// @title Library for safe sending of ether. +/// @author Piper Merriam +library SafeSendLib { + /// @dev Attempts to send the specified amount to the recipient throwing an error if it fails + /// @param recipient The address that the funds should be to. + /// @param value The amount in wei that should be sent. + function sendOrThrow(address recipient, uint value) returns (bool) { + if (value > this.balance) throw; + + if (!recipient.send(value)) throw; + + return true; + } +} diff --git a/tests/example-packages/owned/1.0.0.json b/tests/example-packages/owned/1.0.0.json new file mode 100644 index 00000000..c1bcb0cd --- /dev/null +++ b/tests/example-packages/owned/1.0.0.json @@ -0,0 +1,21 @@ +{ + "lockfile_version": "1", + "version": "1.0.0", + "package_name": "owned", + "meta": { + "license": "MIT", + "authors": [ + "Piper Merriam " + ], + "description": "Reusable contracts which implement a privileged'owner' model for authorization", + "keywords": [ + "authorization" + ], + "links": { + "documentation": "ipfs://QmQiqrwqdav5bV8mtv4PqGksGcDWo43f7PAZYwhJqNEv2j" + } + }, + "sources": { + "./contracts/owned.sol": "ipfs://QmUjYUcX9kLv2FQH8nwc3RLLXtU3Yv5XFpvEjFcAKXB6xD" + } +} diff --git a/tests/example-packages/owned/contracts/owned.sol b/tests/example-packages/owned/contracts/owned.sol new file mode 100644 index 00000000..125df5fc --- /dev/null +++ b/tests/example-packages/owned/contracts/owned.sol @@ -0,0 +1,11 @@ +pragma solidity ^0.4.0; + +contract owned { + address owner; + + function owned() { + owner = msg.sender; + } + + modifier onlyowner { if (msg.sender != owner) throw; _; } +} diff --git a/tests/example-packages/piper-coin/1.0.0.json b/tests/example-packages/piper-coin/1.0.0.json new file mode 100644 index 00000000..24d4965d --- /dev/null +++ b/tests/example-packages/piper-coin/1.0.0.json @@ -0,0 +1,24 @@ +{ + "lockfile_version": "1", + "version": "1.0.0", + "package_name": "piper-coin", + "deployments": { + "blockchain://41941023680923e0fe4d74a34bdac8141f2540e3ae90623718e47d66d1ca4a2d/block/cff59cd4bc7077ae557eb39f84f869a1ea7955d52071bad439f0458383a78780": { + "PiperCoin": { + "contract_type": "standard-token:StandardToken", + "address": "0x11cbb0604e47e0f8501b8f56c1c05f92088dc1b0", + "transaction": "0x1f8206683e4b1dea1fd2e7299b7606ff27440f33cb994b42b4ecc4b0f83a210f", + "block": "0xe94a700ef9aa2d7a1b07321838251ea4ade8d4d682121f67899f401433a0d910", + "bytecode": 
"0x606060405234610000576040516020806106f4833981016040528080519060200190919050505b8060028190555080600060003373ffffffffffffffffffffffffffffffffffffffff168152602001908152602001600020819055503373ffffffffffffffffffffffffffffffffffffffff1660007fddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef836040518082815260200191505060405180910390a35b505b610638806100bc6000396000f360606040523615610070576000357c010000000000000000000000000000000000000000000000000000000090048063095ea7b31461007557806318160ddd146100b157806323b872dd146100d457806370a0823114610119578063a9059cbb1461014a578063dd62ed3e14610186575b610000565b346100005761009960048080359060200190919080359060200190919050506101c0565b60405180821515815260200191505060405180910390f35b34610000576100be610287565b6040518082815260200191505060405180910390f35b3461000057610101600480803590602001909190803590602001909190803590602001909190505061028d565b60405180821515815260200191505060405180910390f35b34610000576101346004808035906020019091905050610478565b6040518082815260200191505060405180910390f35b346100005761016e60048080359060200190919080359060200190919050506104ac565b60405180821515815260200191505060405180910390f35b34610000576101aa60048080359060200190919080359060200190919050506105dc565b6040518082815260200191505060405180910390f35b600081600160003373ffffffffffffffffffffffffffffffffffffffff16815260200190815260200160002060008573ffffffffffffffffffffffffffffffffffffffff168152602001908152602001600020819055508273ffffffffffffffffffffffffffffffffffffffff163373ffffffffffffffffffffffffffffffffffffffff167f8c5be1e5ebec7d5bd14f71427d1e84f3dd0314c0f7b2291e5b200ac8c7c3b925846040518082815260200191505060405180910390a3600190505b92915050565b60025481565b600081600060008673ffffffffffffffffffffffffffffffffffffffff1681526020019081526020016000205410158015610318575081600160008673ffffffffffffffffffffffffffffffffffffffff16815260200190815260200160002060003373ffffffffffffffffffffffffffffffffffffffff1681526020019081526020016000205410155b80156103245750600082115b156104675781600060008573ffffffffffffffffffffffffffffffffffffffff1681526020019081526020016000206000828254019250508190555081600060008673ffffffffffffffffffffffffffffffffffffffff1681526020019081526020016000206000828254039250508190555081600160008673ffffffffffffffffffffffffffffffffffffffff16815260200190815260200160002060003373ffffffffffffffffffffffffffffffffffffffff168152602001908152602001600020600082825403925050819055508273ffffffffffffffffffffffffffffffffffffffff168473ffffffffffffffffffffffffffffffffffffffff167fddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef846040518082815260200191505060405180910390a36001905061047156610470565b60009050610471565b5b9392505050565b6000600060008373ffffffffffffffffffffffffffffffffffffffff1681526020019081526020016000205490505b919050565b600081600060003373ffffffffffffffffffffffffffffffffffffffff16815260200190815260200160002054101580156104e75750600082115b156105cc5781600060003373ffffffffffffffffffffffffffffffffffffffff1681526020019081526020016000206000828254039250508190555081600060008573ffffffffffffffffffffffffffffffffffffffff168152602001908152602001600020600082825401925050819055508273ffffffffffffffffffffffffffffffffffffffff163373ffffffffffffffffffffffffffffffffffffffff167fddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef846040518082815260200191505060405180910390a3600190506105d6566105d5565b600090506105d6565b5b92915050565b6000600160008473ffffffffffffffffffffffffffffffffffffffff16815260200190815260200160002060008373ffffffffffffffffffffffffffffffffffffffff1681526020019081526020016000205490505b92
91505056", + "runtime_bytecode": "0x60606040523615610070576000357c010000000000000000000000000000000000000000000000000000000090048063095ea7b31461007557806318160ddd146100b157806323b872dd146100d457806370a0823114610119578063a9059cbb1461014a578063dd62ed3e14610186575b610000565b346100005761009960048080359060200190919080359060200190919050506101c0565b60405180821515815260200191505060405180910390f35b34610000576100be610287565b6040518082815260200191505060405180910390f35b3461000057610101600480803590602001909190803590602001909190803590602001909190505061028d565b60405180821515815260200191505060405180910390f35b34610000576101346004808035906020019091905050610478565b6040518082815260200191505060405180910390f35b346100005761016e60048080359060200190919080359060200190919050506104ac565b60405180821515815260200191505060405180910390f35b34610000576101aa60048080359060200190919080359060200190919050506105dc565b6040518082815260200191505060405180910390f35b600081600160003373ffffffffffffffffffffffffffffffffffffffff16815260200190815260200160002060008573ffffffffffffffffffffffffffffffffffffffff168152602001908152602001600020819055508273ffffffffffffffffffffffffffffffffffffffff163373ffffffffffffffffffffffffffffffffffffffff167f8c5be1e5ebec7d5bd14f71427d1e84f3dd0314c0f7b2291e5b200ac8c7c3b925846040518082815260200191505060405180910390a3600190505b92915050565b60025481565b600081600060008673ffffffffffffffffffffffffffffffffffffffff1681526020019081526020016000205410158015610318575081600160008673ffffffffffffffffffffffffffffffffffffffff16815260200190815260200160002060003373ffffffffffffffffffffffffffffffffffffffff1681526020019081526020016000205410155b80156103245750600082115b156104675781600060008573ffffffffffffffffffffffffffffffffffffffff1681526020019081526020016000206000828254019250508190555081600060008673ffffffffffffffffffffffffffffffffffffffff1681526020019081526020016000206000828254039250508190555081600160008673ffffffffffffffffffffffffffffffffffffffff16815260200190815260200160002060003373ffffffffffffffffffffffffffffffffffffffff168152602001908152602001600020600082825403925050819055508273ffffffffffffffffffffffffffffffffffffffff168473ffffffffffffffffffffffffffffffffffffffff167fddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef846040518082815260200191505060405180910390a36001905061047156610470565b60009050610471565b5b9392505050565b6000600060008373ffffffffffffffffffffffffffffffffffffffff1681526020019081526020016000205490505b919050565b600081600060003373ffffffffffffffffffffffffffffffffffffffff16815260200190815260200160002054101580156104e75750600082115b156105cc5781600060003373ffffffffffffffffffffffffffffffffffffffff1681526020019081526020016000206000828254039250508190555081600060008573ffffffffffffffffffffffffffffffffffffffff168152602001908152602001600020600082825401925050819055508273ffffffffffffffffffffffffffffffffffffffff163373ffffffffffffffffffffffffffffffffffffffff167fddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef846040518082815260200191505060405180910390a3600190506105d6566105d5565b600090506105d6565b5b92915050565b6000600160008473ffffffffffffffffffffffffffffffffffffffff16815260200190815260200160002060008373ffffffffffffffffffffffffffffffffffffffff1681526020019081526020016000205490505b9291505056", + "compiler": { + "type": "solc", + "version": "0.4.6+commit.2dabbdf0.Darwin.appleclang" + } + } + } + }, + "build_dependencies": { + "standard-token": "ipfs://QmegJYswSDXUJbKWBuTj7AGBY15XceKxnF1o1Vo2VvVPLQ" + } +} diff --git a/tests/example-packages/safe-math-lib/1.0.0.json b/tests/example-packages/safe-math-lib/1.0.0.json new 
file mode 100644 index 00000000..18c44b23 --- /dev/null +++ b/tests/example-packages/safe-math-lib/1.0.0.json @@ -0,0 +1,79 @@ +{ + "lockfile_version": "1", + "version": "1.0.0", + "package_name": "safe-math-lib", + "sources": { + "./contracts/SafeMathLib.sol": "ipfs://QmVN1p6MmMLYcSq1VTmaSDLC3xWuAUwEFBFtinfzpmtzQG" + }, + "contract_types": { + "SafeMathLib": { + "bytecode": "0x606060405234610000575b60a9806100176000396000f36504062dabbdf050606060405260e060020a6000350463a293d1e88114602e578063e6cb901314604c575b6000565b603a600435602435606a565b60408051918252519081900360200190f35b603a6004356024356088565b60408051918252519081900360200190f35b6000828211602a57508082036081566081565b6000565b5b92915050565b6000828284011115602a57508181016081566081565b6000565b5b9291505056", + "runtime_bytecode": "0x6504062dabbdf050606060405260e060020a6000350463a293d1e88114602e578063e6cb901314604c575b6000565b603a600435602435606a565b60408051918252519081900360200190f35b603a6004356024356088565b60408051918252519081900360200190f35b6000828211602a57508082036081566081565b6000565b5b92915050565b6000828284011115602a57508181016081566081565b6000565b5b9291505056", + "abi": [ + { + "constant": false, + "inputs": [ + {"name": "a","type": "uint256"}, + {"name": "b","type": "uint256"} + ], + "name": "safeSub", + "outputs": [ + {"name": "","type": "uint256"} + ], + "payable": false, + "type": "function" + }, + { + "constant": false, + "inputs": [ + {"name": "a","type": "uint256"}, + {"name": "b","type": "uint256"} + ], + "name": "safeAdd", + "outputs": [ + {"name": "","type": "uint256"} + ], + "payable": false, + "type": "function" + } + ], + "compiler": { + "type": "solc", + "version": "0.4.6+commit.2dabbdf0.Darwin.appleclang", + "settings": { + "optimize": true + } + }, + "natspec": { + "title": "Safe Math Library", + "author": "Piper Merriam ", + "methods": { + "safeAdd(uint256,uint256)": { + "details": "Subtracts b from a, throwing an error if the operation would cause an underflow.", + "params": { + "a": "The number to be subtracted from", + "b": "The amount that should be subtracted" + } + }, + "safeSub(uint256,uint256)": { + "details": "Adds a and b, throwing an error if the operation would cause an overflow.", + "params": { + "a": "The first number to add", + "b": "The second number to add" + } + } + } + } + } + }, + "deployments": { + "blockchain://41941023680923e0fe4d74a34bdac8141f2540e3ae90623718e47d66d1ca4a2d/block/1e96de11320c83cca02e8b9caf3e489497e8e432befe5379f2f08599f8aecede": { + "SafeMathLib": { + "contract_type": "SafeMathLib", + "address": "0x8d2c532d7d211816a2807a411f947b211569b68c", + "transaction": "0xaceef751507a79c2dee6aa0e9d8f759aa24aab081f6dcf6835d792770541cb2b", + "block": "0x420cb2b2bd634ef42f9082e1ee87a8d4aeeaf506ea5cdeddaa8ff7cbf911810c" + } + } + } +} diff --git a/tests/example-packages/safe-math-lib/contracts/SafeMathLib.sol b/tests/example-packages/safe-math-lib/contracts/SafeMathLib.sol new file mode 100644 index 00000000..1b610708 --- /dev/null +++ b/tests/example-packages/safe-math-lib/contracts/SafeMathLib.sol @@ -0,0 +1,28 @@ +pragma solidity ^0.4.0; + + +/// @title Safe Math Library +/// @author Piper Merriam +library SafeMathLib { + /// @dev Subtracts b from a, throwing an error if the operation would cause an underflow. 
+ /// @param a The number to be subtracted from + /// @param b The amount that should be subtracted + function safeAdd(uint a, uint b) returns (uint) { + if (a + b > a) { + return a + b; + } else { + throw; + } + } + + /// @dev Adds a and b, throwing an error if the operation would cause an overflow. + /// @param a The first number to add + /// @param b The second number to add + function safeSub(uint a, uint b) returns (uint) { + if (b <= a) { + return a - b; + } else { + throw; + } + } +} diff --git a/tests/example-packages/standard-token/1.0.0.json b/tests/example-packages/standard-token/1.0.0.json new file mode 100644 index 00000000..c56ad51d --- /dev/null +++ b/tests/example-packages/standard-token/1.0.0.json @@ -0,0 +1,55 @@ +{ + "lockfile_version": "1", + "version": "1.0.0", + "package_name": "standard-token", + "sources": { + "./contracts/AbstractToken.sol": "ipfs://QmQMXDprXxCunfQjA42LXZtzL6YMP8XTuGDB6AjHzpYHgk", + "./contracts/StandardToken.sol": "ipfs://QmNLr7DzmiaQvk25C8bADBnh9bF5V3JfbwHS49kyoGGEHz" + }, + "contract_types": { + "StandardToken": { + "abi": [{"constant":false,"inputs":[{"name":"_spender","type":"address"},{"name":"_value","type":"uint256"}],"name":"approve","outputs":[{"name":"success","type":"bool"}],"payable":false,"type":"function"},{"constant":true,"inputs":[],"name":"totalSupply","outputs":[{"name":"","type":"uint256"}],"payable":false,"type":"function"},{"constant":false,"inputs":[{"name":"_from","type":"address"},{"name":"_to","type":"address"},{"name":"_value","type":"uint256"}],"name":"transferFrom","outputs":[{"name":"success","type":"bool"}],"payable":false,"type":"function"},{"constant":true,"inputs":[{"name":"_owner","type":"address"}],"name":"balanceOf","outputs":[{"name":"balance","type":"uint256"}],"payable":false,"type":"function"},{"constant":false,"inputs":[{"name":"_to","type":"address"},{"name":"_value","type":"uint256"}],"name":"transfer","outputs":[{"name":"success","type":"bool"}],"payable":false,"type":"function"},{"constant":true,"inputs":[{"name":"_owner","type":"address"},{"name":"_spender","type":"address"}],"name":"allowance","outputs":[{"name":"remaining","type":"uint256"}],"payable":false,"type":"function"},{"inputs":[{"name":"_totalSupply","type":"uint256"}],"payable":false,"type":"constructor"},{"anonymous":false,"inputs":[{"indexed":true,"name":"from","type":"address"},{"indexed":true,"name":"to","type":"address"},{"indexed":false,"name":"value","type":"uint256"}],"name":"Transfer","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"name":"owner","type":"address"},{"indexed":true,"name":"spender","type":"address"},{"indexed":false,"name":"value","type":"uint256"}],"name":"Approval","type":"event"}], + "natspec": { + "author": "Stefan George - ", + "title": "Standard token contract", + "methods": { + "allowance(address,address)": { + "details": "Returns number of allowed tokens for given address.", + "params": { + "_owner": "Address of token owner.", + "_spender": "Address of token spender." + } + }, + "approve(address,uint256)": { + "details": "Sets approved amount of tokens for spender. Returns success.", + "params": { + "_spender": "Address of allowed account.", + "_value": "Number of approved tokens." + } + }, + "balanceOf(address)": { + "details": "Returns number of tokens owned by given address.", + "params": { + "_owner": "Address of token owner." + } + }, + "transfer(address,uint256)": { + "details": "Transfers sender's tokens to a given address. 
Returns success.", + "params": { + "_to": "Address of token receiver.", + "_value": "Number of tokens to transfer." + } + }, + "transferFrom(address,address,uint256)": { + "details": "Allows allowed third party to transfer tokens from one address to another. Returns success.", + "params": { + "_from": "Address from where tokens are withdrawn.", + "_to": "Address to where tokens are sent.", + "_value": "Number of tokens to transfer." + } + } + } + } + } + } +} diff --git a/tests/example-packages/standard-token/contracts/AbstractToken.sol b/tests/example-packages/standard-token/contracts/AbstractToken.sol new file mode 100644 index 00000000..3ac28e62 --- /dev/null +++ b/tests/example-packages/standard-token/contracts/AbstractToken.sol @@ -0,0 +1,19 @@ +pragma solidity ^0.4.0; + + +/// Implements ERC 20 Token standard: https://github.com/ethereum/EIPs/issues/20 + +/// @title Abstract token contract - Functions to be implemented by token contracts. +/// @author Stefan George - +contract Token { + // This is not an abstract function, because solc won't recognize generated getter functions for public variables as functions + function totalSupply() constant returns (uint256 supply) {} + function balanceOf(address owner) constant returns (uint256 balance); + function transfer(address to, uint256 value) returns (bool success); + function transferFrom(address from, address to, uint256 value) returns (bool success); + function approve(address spender, uint256 value) returns (bool success); + function allowance(address owner, address spender) constant returns (uint256 remaining); + + event Transfer(address indexed from, address indexed to, uint256 value); + event Approval(address indexed owner, address indexed spender, uint256 value); +} diff --git a/tests/example-packages/standard-token/contracts/StandardToken.sol b/tests/example-packages/standard-token/contracts/StandardToken.sol new file mode 100644 index 00000000..5e3363b6 --- /dev/null +++ b/tests/example-packages/standard-token/contracts/StandardToken.sol @@ -0,0 +1,83 @@ +pragma solidity ^0.4.0; + + +import "./AbstractToken.sol"; + + +/// @title Standard token contract +/// @author Stefan George - +contract StandardToken is Token { + /* + * Data structures + */ + mapping (address => uint256) balances; + mapping (address => mapping (address => uint256)) allowed; + uint256 public totalSupply; + + function StandardToken(uint _totalSupply) { + totalSupply = _totalSupply; + balances[msg.sender] = _totalSupply; + Transfer(0x0, msg.sender, _totalSupply); + } + + /* + * Read and write storage functions + */ + /// @dev Transfers sender's tokens to a given address. Returns success. + /// @param _to Address of token receiver. + /// @param _value Number of tokens to transfer. + function transfer(address _to, uint256 _value) returns (bool success) { + if (balances[msg.sender] >= _value && _value > 0) { + balances[msg.sender] -= _value; + balances[_to] += _value; + Transfer(msg.sender, _to, _value); + return true; + } + else { + return false; + } + } + + /// @dev Allows allowed third party to transfer tokens from one address to another. Returns success. + /// @param _from Address from where tokens are withdrawn. + /// @param _to Address to where tokens are sent. + /// @param _value Number of tokens to transfer. 
+ function transferFrom(address _from, address _to, uint256 _value) returns (bool success) { + if (balances[_from] >= _value && allowed[_from][msg.sender] >= _value && _value > 0) { + balances[_to] += _value; + balances[_from] -= _value; + allowed[_from][msg.sender] -= _value; + Transfer(_from, _to, _value); + return true; + } + else { + return false; + } + } + + /// @dev Returns number of tokens owned by given address. + /// @param _owner Address of token owner. + function balanceOf(address _owner) constant returns (uint256 balance) { + return balances[_owner]; + } + + /// @dev Sets approved amount of tokens for spender. Returns success. + /// @param _spender Address of allowed account. + /// @param _value Number of approved tokens. + function approve(address _spender, uint256 _value) returns (bool success) { + allowed[msg.sender][_spender] = _value; + Approval(msg.sender, _spender, _value); + return true; + } + + /* + * Read storage functions + */ + /// @dev Returns number of allowed tokens for given address. + /// @param _owner Address of token owner. + /// @param _spender Address of token spender. + function allowance(address _owner, address _spender) constant returns (uint256 remaining) { + return allowed[_owner][_spender]; + } + +} diff --git a/tests/example-packages/transferable/1.0.0.json b/tests/example-packages/transferable/1.0.0.json new file mode 100644 index 00000000..2fc010c7 --- /dev/null +++ b/tests/example-packages/transferable/1.0.0.json @@ -0,0 +1,21 @@ +{ + "lockfile_version": "1", + "version": "1.0.0", + "package_name": "transferable", + "meta": { + "license": "MIT", + "authors": [ + "Piper Merriam " + ], + "description": "Reusable contracts which implement a privileged 'owner' model for authorization with functionality for transferring ownership.", + "keywords": [ + "authorization" + ] + }, + "sources": { + "./contracts/transferable.sol": "ipfs://QmZ6Zg1iEejuJ18LFczowe7dyaxXm4KC4xTgnCkqwJZmAp" + }, + "build_dependencies": { + "owned": "ipfs://QmUwVUMVtkVctrLDeL12SoeCPUacELBU8nAxRtHUzvtjND" + } +} diff --git a/tests/example-packages/transferable/contracts/transferable.sol b/tests/example-packages/transferable/contracts/transferable.sol new file mode 100644 index 00000000..281da342 --- /dev/null +++ b/tests/example-packages/transferable/contracts/transferable.sol @@ -0,0 +1,13 @@ +pragma solidity ^0.4.0; + +import {owned} from "owned/contracts/owned.sol"; + +contract transferable is owned { + event OwnerChanged(address indexed prevOwner, address indexed newOwner); + + function transferOwner(address newOwner) public onlyowner returns (bool) { + OwnerChanged(owner, newOwner); + owner = newOwner; + return true; + } +} diff --git a/tests/example-packages/wallet/1.0.0.json b/tests/example-packages/wallet/1.0.0.json new file mode 100644 index 00000000..c2a3bdab --- /dev/null +++ b/tests/example-packages/wallet/1.0.0.json @@ -0,0 +1,100 @@ +{ + "lockfile_version": "1", + "version": "1.0.0", + "package_name": "wallet", + "sources": { + "./contracts/Wallet.sol": "ipfs://QmYKibsXPSTR5UjywQHX8SM4za1K3QHadtFGWmZqGA4uE9" + }, + "contract_types": { + "Wallet": { + "bytecode": 
"0x60606040525b60008054600160a060020a0319166c01000000000000000000000000338102041790555b5b610224806100386000396000f3606060405236156100355760e060020a6000350463095ea7b381146100435780632e1a7d4d1461006a578063d0679d341461008e575b34610000576100415b5b565b005b34610000576100566004356024356100b5565b604080519115158252519081900360200190f35b34610000576100566004356100f8565b604080519115158252519081900360200190f35b34610000576100566004356024356101da565b604080519115158252519081900360200190f35b6000805433600160a060020a039081169116146100d157610000565b50600160a060020a03821660009081526001602081905260409091208290555b5b92915050565b600160a060020a0333166000908152600160209081526040808320548151830184905281517fa293d1e8000000000000000000000000000000000000000000000000000000008152600481019190915260248101859052905173__SafeMathLib___________________________9263a293d1e89260448082019391829003018186803b156100005760325a03f415610000575050604080518051600160a060020a0333166000818152600160205293842091909155925084156108fc0291859190818181858888f1935050505015156101d157610000565b5060015b919050565b6000805433600160a060020a039081169116146101f657610000565b604051600160a060020a0384169083156108fc029084906000818181858888f19450505050505b5b9291505056", + "runtime_bytecode": "0x606060405236156100355760e060020a6000350463095ea7b381146100435780632e1a7d4d1461006a578063d0679d341461008e575b34610000576100415b5b565b005b34610000576100566004356024356100b5565b604080519115158252519081900360200190f35b34610000576100566004356100f8565b604080519115158252519081900360200190f35b34610000576100566004356024356101da565b604080519115158252519081900360200190f35b6000805433600160a060020a039081169116146100d157610000565b50600160a060020a03821660009081526001602081905260409091208290555b5b92915050565b600160a060020a0333166000908152600160209081526040808320548151830184905281517fa293d1e8000000000000000000000000000000000000000000000000000000008152600481019190915260248101859052905173__SafeMathLib___________________________9263a293d1e89260448082019391829003018186803b156100005760325a03f415610000575050604080518051600160a060020a0333166000818152600160205293842091909155925084156108fc0291859190818181858888f1935050505015156101d157610000565b5060015b919050565b6000805433600160a060020a039081169116146101f657610000565b604051600160a060020a0384169083156108fc029084906000818181858888f19450505050505b5b9291505056", + "abi": [ + { + "constant": false, + "inputs": [ + {"name": "recipient", "type": "address"}, + {"name": "value", "type": "uint256"} + ], + "name": "approve", + "outputs": [ + {"name": "", "type": "bool"} + ], + "payable": false, + "type": "function" + }, + { + "constant": false, + "inputs": [ + {"name": "value", "type": "uint256"} + ], + "name": "withdraw", + "outputs": [ + {"name": "", "type": "bool"} + ], + "payable": false, + "type": "function" + }, + { + "constant": false, + "inputs": [ + {"name": "recipient", "type": "address"}, + {"name": "value", "type": "uint256"} + ], + "name": "send", + "outputs": [ + {"name": "", "type": "bool"} + ], + "payable": false, + "type": "function" + }, + { + "payable": false, + "type": "fallback" + } + ], + "natspec": { + "compiler": { + "type": "solc", + "version": "0.4.6+commit.2dabbdf0.Darwin.appleclang", + "settings": { + "optimize": true + } + }, + "author": "Piper Merriam ", + "methods": { + "approve(address,uint256)": { + "details": "Sets recipient to be approved to withdraw the specified amount", + "notice": "This will set the recipient to be approved to withdraw the specified amount." 
+ }, + "send(address,uint256)": { + "details": "Sends the recipient the specified amount", + "notice": "This will send the reciepient the specified amount." + }, + "withdraw(uint256)": { + "details": "Lets caller withdraw up to their approved amount", + "notice": "This will withdraw provided value, deducting it from your total allowance." + } + }, + "title": "Contract for holding funds in escrow between two semi trusted parties." + } + } + }, + "deployments": { + "blockchain://41941023680923e0fe4d74a34bdac8141f2540e3ae90623718e47d66d1ca4a2d/block/3ececfa0e03bce2d348279316100913c42ca2dcd51b8bc8d2d87ef2dc6a479ff": { + "Wallet": { + "contract_type": "Wallet", + "address": "0xcd0f8d7dab6c682d3726693ef3c7aaacc6431d1c", + "transaction": "0x5c113857925ae0d866341513bb0732cd799ebc1c18fcec253bbc41d2a029acd4", + "block": "0xccd130623ad3b25a357ead2ecfd22d38756b2e6ac09b77a37bd0ecdf16249765", + "link_dependencies": [ + {"offset": 678, "value": "safe-math-lib:SafeMathLib"} + ] + } + } + }, + "build_dependencies": { + "owned": "ipfs://QmUwVUMVtkVctrLDeL12SoeCPUacELBU8nAxRtHUzvtjND", + "safe-math-lib": "ipfs://QmfUwis9K2SLwnUh62PDb929JzU5J2aFKd4kS1YErYajdq" + } +} diff --git a/tests/example-packages/wallet/contracts/Wallet.sol b/tests/example-packages/wallet/contracts/Wallet.sol new file mode 100644 index 00000000..d518b86e --- /dev/null +++ b/tests/example-packages/wallet/contracts/Wallet.sol @@ -0,0 +1,39 @@ +pragma solidity ^0.4.0; + + +import {SafeMathLib} from "safe-math-lib/contracts/SafeMathLib.sol"; +import {owned} from "owned/contracts/owned.sol"; + + +/// @title Contract for holding funds in escrow between two semi trusted parties. +/// @author Piper Merriam +contract Wallet is owned { + using SafeMathLib for uint; + + mapping (address => uint) allowances; + + /// @dev Fallback function for depositing funds + function() { + } + + /// @dev Sends the recipient the specified amount + /// @notice This will send the reciepient the specified amount. + function send(address recipient, uint value) public onlyowner returns (bool) { + return recipient.send(value); + } + + /// @dev Sets recipient to be approved to withdraw the specified amount + /// @notice This will set the recipient to be approved to withdraw the specified amount. + function approve(address recipient, uint value) public onlyowner returns (bool) { + allowances[recipient] = value; + return true; + } + + /// @dev Lets caller withdraw up to their approved amount + /// @notice This will withdraw provided value, deducting it from your total allowance. 
+ function withdraw(uint value) public returns (bool) { + allowances[msg.sender] = allowances[msg.sender].safeSub(value); + if (!msg.sender.send(value)) throw; + return true; + } +} diff --git a/tests/fixtures/UsesOwned.sol b/tests/fixtures/UsesOwned.sol new file mode 100644 index 00000000..f1598fd0 --- /dev/null +++ b/tests/fixtures/UsesOwned.sol @@ -0,0 +1,9 @@ +pragma solidity ^0.4.0; + +import {owned} from "owned/contracts/owned.sol"; + + +contract UsesOwned is owned { + function UsesOwned() { + } +} diff --git a/tests/fixtures/UsesSafeMathLib.sol b/tests/fixtures/UsesSafeMathLib.sol new file mode 100644 index 00000000..e5befbba --- /dev/null +++ b/tests/fixtures/UsesSafeMathLib.sol @@ -0,0 +1,12 @@ +pragma solidity ^0.4.0; + +import {SafeMathLib} from "safe-math-lib/contracts/SafeMathLib.sol"; + + +contract UsesSafeMathLib { + using SafeMathLib for uint; + + function add7(uint v) constant returns (uint) { + return v.safeAdd(7); + } +} diff --git a/tests/fixtures/UsesStandardToken.sol b/tests/fixtures/UsesStandardToken.sol new file mode 100644 index 00000000..6362b0a2 --- /dev/null +++ b/tests/fixtures/UsesStandardToken.sol @@ -0,0 +1,7 @@ +pragma solidity ^0.4.0; + +import {StandardToken} from "standard-token/contracts/StandardToken.sol"; + + +contract UsesStandardToken is StandardToken(1000000) { +} diff --git a/tests/functional-utils/test_star_apply.py b/tests/functional-utils/test_star_apply.py new file mode 100644 index 00000000..b4722b1e --- /dev/null +++ b/tests/functional-utils/test_star_apply.py @@ -0,0 +1,11 @@ +from populus.utils.functional import ( + star_apply, +) + + +def test_star_apply(): + def fn(*args): + return args + + assert fn(['a', 'b']) == (['a', 'b'],) + assert star_apply(fn)(['a', 'b']) == ('a', 'b') diff --git a/tests/ipfs-utils/test_create_ipfs_uri.py b/tests/ipfs-utils/test_create_ipfs_uri.py new file mode 100644 index 00000000..59eca371 --- /dev/null +++ b/tests/ipfs-utils/test_create_ipfs_uri.py @@ -0,0 +1,15 @@ +import pytest + +from populus.utils.ipfs import create_ipfs_uri + + +@pytest.mark.parametrize( + 'value,expected', + ( + ('QmTKB75Y73zhNbD3Y73xeXGjYrZHmaXXNxoZqGCagu7r8u', 'ipfs://QmTKB75Y73zhNbD3Y73xeXGjYrZHmaXXNxoZqGCagu7r8u'), + ('QmTKB75Y73zhNbD3Y73xeXGjYrZHmaXXNxoZqGCagu7r8u/readme', 'ipfs://QmTKB75Y73zhNbD3Y73xeXGjYrZHmaXXNxoZqGCagu7r8u/readme'), + ) +) +def test_create_ipfs_uri(value, expected): + actual = create_ipfs_uri(value) + assert actual == expected diff --git a/tests/ipfs-utils/test_extract_ipfs_path_from_uri.py b/tests/ipfs-utils/test_extract_ipfs_path_from_uri.py new file mode 100644 index 00000000..0f2e1027 --- /dev/null +++ b/tests/ipfs-utils/test_extract_ipfs_path_from_uri.py @@ -0,0 +1,62 @@ +import pytest + +from populus.utils.ipfs import extract_ipfs_path_from_uri + + +@pytest.mark.parametrize( + 'value,expected', + ( + ( + 'ipfs:QmTKB75Y73zhNbD3Y73xeXGjYrZHmaXXNxoZqGCagu7r8u', + 'QmTKB75Y73zhNbD3Y73xeXGjYrZHmaXXNxoZqGCagu7r8u', + ), + ( + 'ipfs:/QmTKB75Y73zhNbD3Y73xeXGjYrZHmaXXNxoZqGCagu7r8u', + 'QmTKB75Y73zhNbD3Y73xeXGjYrZHmaXXNxoZqGCagu7r8u', + ), + ( + 'ipfs://QmTKB75Y73zhNbD3Y73xeXGjYrZHmaXXNxoZqGCagu7r8u', + 'QmTKB75Y73zhNbD3Y73xeXGjYrZHmaXXNxoZqGCagu7r8u', + ), + ( + 'ipfs:QmTKB75Y73zhNbD3Y73xeXGjYrZHmaXXNxoZqGCagu7r8u/', + 'QmTKB75Y73zhNbD3Y73xeXGjYrZHmaXXNxoZqGCagu7r8u/', + ), + ( + 'ipfs:/QmTKB75Y73zhNbD3Y73xeXGjYrZHmaXXNxoZqGCagu7r8u/', + 'QmTKB75Y73zhNbD3Y73xeXGjYrZHmaXXNxoZqGCagu7r8u/', + ), + ( + 'ipfs://QmTKB75Y73zhNbD3Y73xeXGjYrZHmaXXNxoZqGCagu7r8u/', + 'QmTKB75Y73zhNbD3Y73xeXGjYrZHmaXXNxoZqGCagu7r8u/', + ), 
+ ( + 'ipfs:QmTKB75Y73zhNbD3Y73xeXGjYrZHmaXXNxoZqGCagu7r8u/readme', + 'QmTKB75Y73zhNbD3Y73xeXGjYrZHmaXXNxoZqGCagu7r8u/readme', + ), + ( + 'ipfs:/QmTKB75Y73zhNbD3Y73xeXGjYrZHmaXXNxoZqGCagu7r8u/readme', + 'QmTKB75Y73zhNbD3Y73xeXGjYrZHmaXXNxoZqGCagu7r8u/readme', + ), + ( + 'ipfs://QmTKB75Y73zhNbD3Y73xeXGjYrZHmaXXNxoZqGCagu7r8u/readme', + 'QmTKB75Y73zhNbD3Y73xeXGjYrZHmaXXNxoZqGCagu7r8u/readme', + ), + ( + 'ipfs:QmTKB75Y73zhNbD3Y73xeXGjYrZHmaXXNxoZqGCagu7r8u/readme/', + 'QmTKB75Y73zhNbD3Y73xeXGjYrZHmaXXNxoZqGCagu7r8u/readme/', + ), + ( + 'ipfs:/QmTKB75Y73zhNbD3Y73xeXGjYrZHmaXXNxoZqGCagu7r8u/readme/', + 'QmTKB75Y73zhNbD3Y73xeXGjYrZHmaXXNxoZqGCagu7r8u/readme/', + ), + ( + 'ipfs://QmTKB75Y73zhNbD3Y73xeXGjYrZHmaXXNxoZqGCagu7r8u/readme/', + 'QmTKB75Y73zhNbD3Y73xeXGjYrZHmaXXNxoZqGCagu7r8u/readme/', + ), + ) +) +def test_extract_ipfs_path_from_uri(value, expected): + actual = extract_ipfs_path_from_uri(value) + assert actual == expected + diff --git a/tests/ipfs-utils/test_generate_ipfs_multihash.py b/tests/ipfs-utils/test_generate_ipfs_multihash.py new file mode 100644 index 00000000..5d2dd936 --- /dev/null +++ b/tests/ipfs-utils/test_generate_ipfs_multihash.py @@ -0,0 +1,19 @@ +import pytest + +from populus.utils.ipfs import ( + generate_file_hash, +) + + +@pytest.mark.parametrize( + "file_name,file_contents,expected", + ( + ("test-1.txt", "piper\n", "QmUdxEGxvp71kqYLkA91mtNg9QRRSPBtA3UV6VuYhoP7DB"), + ("test-2.txt", "pipermerriam\n", "QmXqrQR7EMePe9LCRUVrfkxYg5EHRNpcA1PZnN4AnbM9DW"), + ("test-3.txt", "this is a test file for ipfs hash generation\n", "QmYknNUKXWSaxfCWVgHd8uVCYHhzPerVCLvCCBedWtqbnv"), + ), +) +def test_generate_file_hash(project_dir, write_project_file, file_name, file_contents, expected): + write_project_file(file_name, file_contents) + ipfs_multihash = generate_file_hash(file_name) + assert ipfs_multihash == expected diff --git a/tests/ipfs-utils/test_is_ipfs_uri.py b/tests/ipfs-utils/test_is_ipfs_uri.py new file mode 100644 index 00000000..d6d9cae7 --- /dev/null +++ b/tests/ipfs-utils/test_is_ipfs_uri.py @@ -0,0 +1,34 @@ +import pytest + +from populus.utils.ipfs import is_ipfs_uri + + +@pytest.mark.parametrize( + 'value,expected', + ( + ('ipfs:QmTKB75Y73zhNbD3Y73xeXGjYrZHmaXXNxoZqGCagu7r8u', True), + ('ipfs:/QmTKB75Y73zhNbD3Y73xeXGjYrZHmaXXNxoZqGCagu7r8u', True), + ('ipfs://QmTKB75Y73zhNbD3Y73xeXGjYrZHmaXXNxoZqGCagu7r8u', True), + ('ipfs:QmTKB75Y73zhNbD3Y73xeXGjYrZHmaXXNxoZqGCagu7r8u/', True), + ('ipfs:/QmTKB75Y73zhNbD3Y73xeXGjYrZHmaXXNxoZqGCagu7r8u/', True), + ('ipfs://QmTKB75Y73zhNbD3Y73xeXGjYrZHmaXXNxoZqGCagu7r8u/', True), + ('ipfs:QmTKB75Y73zhNbD3Y73xeXGjYrZHmaXXNxoZqGCagu7r8u/readme', True), + ('ipfs:/QmTKB75Y73zhNbD3Y73xeXGjYrZHmaXXNxoZqGCagu7r8u/readme', True), + ('ipfs://QmTKB75Y73zhNbD3Y73xeXGjYrZHmaXXNxoZqGCagu7r8u/readme', True), + ('ipfs:QmTKB75Y73zhNbD3Y73xeXGjYrZHmaXXNxoZqGCagu7r8u/readme/', True), + ('ipfs:/QmTKB75Y73zhNbD3Y73xeXGjYrZHmaXXNxoZqGCagu7r8u/readme/', True), + ('ipfs://QmTKB75Y73zhNbD3Y73xeXGjYrZHmaXXNxoZqGCagu7r8u/readme/', True), + # malformed + ('ipfs//QmTKB75Y73zhNbD3Y73xeXGjYrZHmaXXNxoZqGCagu7r8u/readme/', False), + ('ipfs/QmTKB75Y73zhNbD3Y73xeXGjYrZHmaXXNxoZqGCagu7r8u/readme/', False), + ('ipfsQmTKB75Y73zhNbD3Y73xeXGjYrZHmaXXNxoZqGCagu7r8u/readme/', False), + # HTTP + ('http://QmTKB75Y73zhNbD3Y73xeXGjYrZHmaXXNxoZqGCagu7r8u/readme', False), + ('https://QmTKB75Y73zhNbD3Y73xeXGjYrZHmaXXNxoZqGCagu7r8u/readme', False), + # No hash + ('ipfs://', False), + ) +) +def test_is_ipfs_uri(value, expected): + actual = is_ipfs_uri(value) + assert actual is 
expected diff --git a/tests/packaging-utils/conftest.py b/tests/packaging-utils/conftest.py new file mode 100644 index 00000000..2475c41b --- /dev/null +++ b/tests/packaging-utils/conftest.py @@ -0,0 +1,186 @@ +import pytest + +import os +import json +from collections import OrderedDict + +from eth_utils import ( + to_dict, +) + +from populus import Project + +from populus.packages.backends.ipfs import BaseIPFSPackageBackend +from populus.packages.backends.index import BasePackageIndexBackend +from populus.packages.backends.manifest import LocalManifestBackend +from populus.packages.backends.lockfile import LocalFilesystemLockfileBackend + +from populus.utils.filesystem import ( + find_solidity_source_files, + is_same_path, +) +from populus.utils.dependencies import ( + get_dependency_base_dir, + get_build_identifier, + get_install_identifier, + get_release_lockfile_path, + get_installed_packages_dir, +) +from populus.utils.ipfs import ( + is_ipfs_uri, + generate_file_hash, + extract_ipfs_path_from_uri, +) +from populus.utils.packaging import ( + is_aliased_ipfs_uri, + load_release_lockfile, +) + + +class MockPackageIndexBackend(BasePackageIndexBackend): + packages = None + + def setup_backend(self): + self.packages = {} + + def publish_release_lockfile(self, release_lockfile, release_lockfile_uri): + package_name = release_lockfile['package_name'] + self.packages.setdefault(package_name, {}) + if release_lockfile['version'] in self.packages[package_name]: + raise ValueError("Cannot overwrite release") + self.packages[package_name][release_lockfile['version']] = release_lockfile_uri + + def is_known_package_name(self, package_name): + return package_name in self.packages + + def get_all_versions(self, package_name): + return (self.packages[package_name].keys()) + + def get_release_lockfile_for_version(self, package_name, version): + return self.packages[package_name][version] + + +class MockIPFSBackend(BaseIPFSPackageBackend): + files = None + + def setup_backend(self): + self.files = {} + + @to_dict + def resolve_package_source_tree(self, release_lockfile): + sources = release_lockfile['sources'] + + for source_path, source_value in sources.items(): + if is_ipfs_uri(source_value): + ipfs_path = extract_ipfs_path_from_uri(source_value) + yield source_path, self.files[ipfs_path] + else: + yield source_path, source_value + + def push_file_to_ipfs(self, file_path): + ipfs_hash = generate_file_hash(file_path) + with open(file_path) as file: + file_contents = file.read() + self.files[ipfs_hash] = file_contents + return ipfs_hash + + def get_file_from_ipfs(self, ipfs_path): + file_contents = self.files[ipfs_path] + return file_contents + + +@pytest.fixture() +def mock_package_index_backend(project): + return MockPackageIndexBackend(project, {}) + + +@pytest.fixture() +def mock_IPFS_backend(project): + return MockIPFSBackend(project, {}) + + +@pytest.fixture() +def mock_package_backends(project, mock_IPFS_backend, mock_package_index_backend): + package_backends = OrderedDict(( + ('LocalManifestBackend', LocalManifestBackend(project, {})), + ('LocalFilesystemLockfileBackend', LocalFilesystemLockfileBackend(project, {})), + ('MockIPFSBackend', mock_IPFS_backend), + ('MockPackageIndexBackend', mock_package_index_backend), + )) + return package_backends + + +EXAMPLE_PACKAGES_BASE_PATH = './tests/example-packages' + + +@pytest.fixture() +def load_example_project(populus_source_root, + mock_package_index_backend, + mock_IPFS_backend): + def _load_example_project(project_name): + project_base_dir = 
os.path.join( + populus_source_root, + EXAMPLE_PACKAGES_BASE_PATH, + project_name, + ) + v1_release_lockfile_path = os.path.join(project_base_dir, '1.0.0.json') + contracts_source_dir = os.path.join(project_base_dir, 'contracts') + + v1_release_lockfile_uri = mock_IPFS_backend.persist_package_file( + v1_release_lockfile_path, + ) + v1_release_lockfile = load_release_lockfile(v1_release_lockfile_path) + mock_package_index_backend.publish_release_lockfile( + v1_release_lockfile, + v1_release_lockfile_uri, + ) + + for solidity_source_path in find_solidity_source_files(contracts_source_dir): + mock_IPFS_backend.persist_package_file(solidity_source_path) + return _load_example_project + + +@pytest.fixture() +def verify_installed_package(): + def _verify_installed_package(installed_packages_dir, package_base_dir, package_data): + package_meta = package_data['meta'] + + expected_package_base_dir = get_dependency_base_dir( + installed_packages_dir, + package_meta['dependency_name'], + ) + + assert os.path.exists(package_base_dir) + assert is_same_path(package_base_dir, expected_package_base_dir) + + for rel_source_path, source_contents in package_data['source_tree'].items(): + source_path = os.path.join(package_base_dir, rel_source_path) + assert os.path.exists(source_path) + with open(source_path) as source_file: + actual_source_contents = source_file.read() + assert actual_source_contents == source_contents + + build_identifier = get_build_identifier(package_base_dir) + assert build_identifier == package_meta['build_identifier'] + + install_identifier = get_install_identifier(package_base_dir) + assert install_identifier == package_meta['install_identifier'] + + release_lockfile_path = get_release_lockfile_path(package_base_dir) + release_lockfile = load_release_lockfile(release_lockfile_path) + + assert release_lockfile == package_data['lockfile'] + + package_installed_packages_dir = get_installed_packages_dir(package_base_dir) + + for dependency_package_data in package_data['dependencies']: + sub_dependency_base_dir = get_dependency_base_dir( + package_installed_packages_dir, + dependency_package_data['meta']['dependency_name'], + ) + _verify_installed_package( + package_installed_packages_dir, + sub_dependency_base_dir, + dependency_package_data, + ) + return _verify_installed_package diff --git a/tests/packaging-utils/test_compute_identifier_tree.py b/tests/packaging-utils/test_compute_identifier_tree.py new file mode 100644 index 00000000..ef400148 --- /dev/null +++ b/tests/packaging-utils/test_compute_identifier_tree.py @@ -0,0 +1,165 @@ +import json +from populus.utils.packaging import ( + compute_identifier_tree, +) + + +def test_tree_computation_for_exact_package_name(mock_package_index_backend, + mock_package_backends): + mock_package_index_backend.packages['owned'] = { + '1.0.0': 'ipfs://QmUwVUMVtkVctrLDeL12SoeCPUacELBU8nAxRtHUzvtjND', + } + identifier_tree = compute_identifier_tree( + ['owned'], + mock_package_backends, + ) + expected = { + 'owned': { + 'owned==1.0.0': { + 'ipfs://QmUwVUMVtkVctrLDeL12SoeCPUacELBU8nAxRtHUzvtjND': None, + }, + }, + } + assert identifier_tree == expected + + +def test_tree_computation_for_aliased_exact_package_name(mock_package_index_backend, + mock_package_backends): + mock_package_index_backend.packages['owned'] = { + '1.0.0': 'ipfs://QmUwVUMVtkVctrLDeL12SoeCPUacELBU8nAxRtHUzvtjND', + } + identifier_tree = compute_identifier_tree( + ['powned:owned'], + mock_package_backends, + ) + expected = { + 'powned:owned': { + 'owned': { + 'owned==1.0.0': { + 
'ipfs://QmUwVUMVtkVctrLDeL12SoeCPUacELBU8nAxRtHUzvtjND': None, + }, + }, + }, + } + assert identifier_tree == expected + + +def test_tree_computation_for_exact_version_package_identifier(mock_package_index_backend, + mock_package_backends): + mock_package_index_backend.packages['owned'] = { + '1.0.0': 'ipfs://QmUwVUMVtkVctrLDeL12SoeCPUacELBU8nAxRtHUzvtjND', + } + identifier_tree = compute_identifier_tree( + ['owned==1.0.0'], + mock_package_backends, + ) + expected = { + 'owned==1.0.0': { + 'ipfs://QmUwVUMVtkVctrLDeL12SoeCPUacELBU8nAxRtHUzvtjND': None, + }, + } + assert identifier_tree == expected + + +def test_tree_computation_for_comparison_package_identifier(mock_package_index_backend, + mock_package_backends): + mock_package_index_backend.packages['owned'] = { + '1.0.0': 'ipfs://QmUwVUMVtkVctrLDeL12SoeCPUacELBU8nAxRtHUzvtjND', + '2.0.0': 'ipfs://QmPvJ1P9B5rh8ZsMqwkEhUjEVfaRA36sTD4JeyP1Mbo1Vh', + } + identifier_tree = compute_identifier_tree( + ['owned>1.0.0'], + mock_package_backends, + ) + expected = { + 'owned>1.0.0': { + 'owned==2.0.0': { + 'ipfs://QmPvJ1P9B5rh8ZsMqwkEhUjEVfaRA36sTD4JeyP1Mbo1Vh': None, + }, + }, + } + assert identifier_tree == expected + + +def test_tree_computation_from_manifest_dependencies(project_dir, + write_project_file, + mock_package_index_backend, + mock_package_backends): + mock_package_index_backend.packages['owned'] = { + '1.0.0': 'ipfs://QmUwVUMVtkVctrLDeL12SoeCPUacELBU8nAxRtHUzvtjND', + '2.0.0': 'ipfs://QmPvJ1P9B5rh8ZsMqwkEhUjEVfaRA36sTD4JeyP1Mbo1Vh', + } + mock_package_index_backend.packages['standard-token'] = { + '1.0.0': 'ipfs://QmegJYswSDXUJbKWBuTj7AGBY15XceKxnF1o1Vo2VvVPLQ', + } + + package_manifest = { + 'package_name': 'test-package', + 'dependencies': { + 'owned': ">1.0.0", + 'standard-token': '1.0.0', + } + } + write_project_file('ethpm.json', json.dumps(package_manifest)) + + identifier_tree = compute_identifier_tree( + ['.'], + mock_package_backends, + ) + expected = { + '.': { + 'owned>1.0.0': { + 'owned==2.0.0': { + 'ipfs://QmPvJ1P9B5rh8ZsMqwkEhUjEVfaRA36sTD4JeyP1Mbo1Vh': None, + }, + }, + 'standard-token==1.0.0': { + 'ipfs://QmegJYswSDXUJbKWBuTj7AGBY15XceKxnF1o1Vo2VvVPLQ': None, + } + } + } + assert identifier_tree == expected + + +def test_tree_computation_from_release_lockfile(project_dir, + write_project_file, + mock_package_index_backend, + mock_package_backends): + release_lockfile = { + 'lockfile_version': '1', + 'version': '1.0.0', + 'package_name': 'test-package', + } + write_project_file('test-package-1.0.0.json', json.dumps(release_lockfile)) + + identifier_tree = compute_identifier_tree( + ['test-package-1.0.0.json'], + mock_package_backends, + ) + expected = { + 'test-package-1.0.0.json': None, + } + assert identifier_tree == expected + + +def test_tree_computation_from_aliased_release_lockfile(project_dir, + write_project_file, + mock_package_index_backend, + mock_package_backends): + release_lockfile = { + 'lockfile_version': '1', + 'version': '1.0.0', + 'package_name': 'test-package', + } + write_project_file('test-package-1.0.0.json', json.dumps(release_lockfile)) + + identifier_tree = compute_identifier_tree( + ['aliased-test-package@test-package-1.0.0.json'], + mock_package_backends, + ) + expected = { + 'aliased-test-package@test-package-1.0.0.json': { + 'test-package-1.0.0.json': None, + } + } + assert identifier_tree == expected diff --git a/tests/packaging-utils/test_construct_build_deployment_data.py b/tests/packaging-utils/test_construct_build_deployment_data.py new file mode 100644 index 00000000..1b1917f7 --- 
/dev/null +++ b/tests/packaging-utils/test_construct_build_deployment_data.py @@ -0,0 +1,79 @@ +import pytest + +from populus.packages.build import ( + construct_deployments_object, +) + +from populus.utils.chains import ( + get_chain_definition, +) +from populus.utils.testing import ( + load_contract_fixture, + load_example_package, +) + + +@load_contract_fixture('Math.sol') +def test_construct_deployments_object_with_project_contract_deployment(project): + contract_data = project.compiled_contract_data['Math'] + + with project.get_chain('tester') as chain: + provider = chain.provider + test_contract, created = provider.get_or_deploy_contract('Math') + assert created + + deployments_object = construct_deployments_object(provider, ['Math']) + + assert 'Math' in deployments_object + + deployed_instance = deployments_object['Math'] + assert deployed_instance['runtime_bytecode'] == contract_data['bytecode_runtime'] + assert deployed_instance['address'] == test_contract.address + assert deployed_instance['contract_type'] == 'Math' + + +@load_example_package('standard-token') +def test_construct_deployments_object_using_dependency_contract_type(project): + contract_data = project.compiled_contract_data['StandardToken'] + + with project.get_chain('tester') as chain: + provider = chain.provider + test_contract, created = provider.get_or_deploy_contract( + 'StandardToken', + deploy_args=(1000000,), + ) + assert created + + deployments_object = construct_deployments_object(provider, ['StandardToken']) + + assert 'StandardToken' in deployments_object + + deployed_instance = deployments_object['StandardToken'] + assert deployed_instance['runtime_bytecode'] == contract_data['bytecode_runtime'] + assert deployed_instance['address'] == test_contract.address + assert deployed_instance['contract_type'] == 'standard-token:StandardToken' + + +@load_example_package('safe-math-lib') +@load_contract_fixture('UsesSafeMathLib.sol') +def test_construct_deployments_object_using_dependency_link_value(project): + contract_data = project.compiled_contract_data['UsesSafeMathLib'] + + with project.get_chain('tester') as chain: + provider = chain.provider + test_contract, created = provider.get_or_deploy_contract('UsesSafeMathLib') + assert created + + deployments_object = construct_deployments_object(provider, ['UsesSafeMathLib']) + + assert 'UsesSafeMathLib' in deployments_object + + deployed_instance = deployments_object['UsesSafeMathLib'] + assert deployed_instance['runtime_bytecode'] == contract_data['bytecode_runtime'] + assert deployed_instance['address'] == test_contract.address + assert deployed_instance['contract_type'] == 'UsesSafeMathLib' + + link_dependencies = deployed_instance['link_dependencies'] + assert len(link_dependencies) == 1 + link_value = link_dependencies[0] + assert link_value['value'] == 'safe-math-lib:SafeMathLib' diff --git a/tests/packaging-utils/test_construct_contract_type_object.py b/tests/packaging-utils/test_construct_contract_type_object.py new file mode 100644 index 00000000..23a855fd --- /dev/null +++ b/tests/packaging-utils/test_construct_contract_type_object.py @@ -0,0 +1,59 @@ +from populus.packages.build import ( + construct_contract_type_object, +) + + +CONTRACT_DATA = { + 'abi': [], + 'bytecode': '0x1234567890abcdef', + 'bytecode_runtime': '0xdeadbeef', + 'metadata': { + 'compiler': { + 'version': '0.4.2+commit.af6afb04.Darwin.appleclang', + }, + 'settings': { + 'optimizer': { + 'runs': 200, + 'enabled': True, + } + } + }, + 'userdoc': { + "methods" : { + "releaseFunds()" : { 
+ "notice" : "This will release the escrowed funds to the other party." + } + } + }, + 'devdoc': { + "author" : "Piper Merriam ", + "methods" : { + "releaseFunds()" : { + "details" : "Releases the escrowed funds to the other party." + } + }, + "title" : "Contract for holding funds in escrow between two semi trusted parties." + }, +} + + +def test_construct_contract_type_object(): + contract_type_object = construct_contract_type_object( + CONTRACT_DATA, + 'Math', + ) + + assert contract_type_object['bytecode'] == '0x1234567890abcdef' + assert contract_type_object['runtime_bytecode'] == '0xdeadbeef' + assert contract_type_object['contract_name'] == 'Math' + assert contract_type_object['abi'] == [] + assert contract_type_object['natspec'] == { + "author" : "Piper Merriam ", + "methods" : { + "releaseFunds()" : { + "details" : "Releases the escrowed funds to the other party.", + "notice" : "This will release the escrowed funds to the other party." + } + }, + "title" : "Contract for holding funds in escrow between two semi trusted parties." + } diff --git a/tests/packaging-utils/test_construct_dependency_identifier.py b/tests/packaging-utils/test_construct_dependency_identifier.py new file mode 100644 index 00000000..088a54dd --- /dev/null +++ b/tests/packaging-utils/test_construct_dependency_identifier.py @@ -0,0 +1,21 @@ +import pytest + +from populus.utils.packaging import ( + construct_dependency_identifier, +) + + +@pytest.mark.parametrize( + 'fn_args,expected', + ( + (("owned", "owned==1.0.0", "ipfs://Qm.."), "1.0.0"), + (("owned", "owned>=1.0.0", "ipfs://Qm.."), ">=1.0.0"), + (("owned", "ipfs://Qm..", "ipfs://Qm.."), "ipfs://Qm.."), + # aliased + (("powned", "owned==1.0.0", "ipfs://Qm.."), "owned==1.0.0"), + (("powned", "owned>=1.0.0", "ipfs://Qm.."), "owned>=1.0.0"), + ) +) +def test_construct_dependency_identifier(fn_args, expected): + actual = construct_dependency_identifier(*fn_args) + assert actual == expected diff --git a/tests/packaging-utils/test_construct_package_identifier.py b/tests/packaging-utils/test_construct_package_identifier.py new file mode 100644 index 00000000..447a5331 --- /dev/null +++ b/tests/packaging-utils/test_construct_package_identifier.py @@ -0,0 +1,24 @@ +import pytest + +from populus.utils.packaging import ( + construct_package_identifier, +) + + +@pytest.mark.parametrize( + 'dependency_name,dependency_identifier,expected', + ( + ("owned", "1.0.0", "owned==1.0.0"), + ("owned", ">1.0.0", "owned>1.0.0"), + ("owned", ">=1.0.0", "owned>=1.0.0"), + ("powned", "owned==1.0.0", "powned:owned==1.0.0"), + ( + 'owned', + 'ipfs://QmUwVUMVtkVctrLDeL12SoeCPUacELBU8nAxRtHUzvtjND', + 'owned@ipfs://QmUwVUMVtkVctrLDeL12SoeCPUacELBU8nAxRtHUzvtjND', + ), + ) +) +def test_construct_package_identifier(dependency_name, dependency_identifier, expected): + actual = construct_package_identifier(dependency_name, dependency_identifier) + assert actual == expected diff --git a/tests/packaging-utils/test_construct_release_lockfile.py b/tests/packaging-utils/test_construct_release_lockfile.py new file mode 100644 index 00000000..82ec96b2 --- /dev/null +++ b/tests/packaging-utils/test_construct_release_lockfile.py @@ -0,0 +1,65 @@ +import pytest + +import json + +from populus.packages.build import ( + construct_release_lockfile, +) + + +TEST_CONTRACT_SOURCE = """pragma solidity ^0.4.0; + +contract TestContract { + uint value; + + function readValue() constant returns (uint) { + return value; + } + + function setValue(uint _value) public { + value = _value; + } +} +""" + + 
+@pytest.fixture() +def TestContract(project_dir, write_project_file): + write_project_file('contracts/TestContract.sol', TEST_CONTRACT_SOURCE) + + +@pytest.fixture() +def package_manifest(project): + _package_manifest = { + 'package_name': 'test-package', + 'version': '1.0.0', + } + with open(project.package_manifest_path, 'w') as package_manifest_file: + json.dump(_package_manifest, package_manifest_file) + + return _package_manifest + + +def test_requires_a_package_manifest(project, TestContract): + assert not project.has_package_manifest + + with pytest.raises(ValueError): + construct_release_lockfile(project, [], [], ['TestContract']) + + +def test_simple_lockfile_with_only_sources(project, TestContract, package_manifest, mock_package_backends): + project.package_backends = mock_package_backends + release_lockfile = construct_release_lockfile(project, [], [], []) + + assert 'version' in release_lockfile + assert release_lockfile['version'] == '1.0.0' + + # no meta information present so key should be excluded + assert 'meta' not in release_lockfile + + assert 'sources' in release_lockfile + sources = release_lockfile['sources'] + assert './contracts/TestContract.sol' in sources + assert sources['./contracts/TestContract.sol'] == 'ipfs://QmTRxTJ7LLYjKuC6toWFBAKu3h7rXxEzmsV19TyQ4q5RzF' + + assert 'contract_types' not in release_lockfile diff --git a/tests/packaging-utils/test_extract_dependency_name_from_identifier_lineage.py b/tests/packaging-utils/test_extract_dependency_name_from_identifier_lineage.py new file mode 100644 index 00000000..dbce372e --- /dev/null +++ b/tests/packaging-utils/test_extract_dependency_name_from_identifier_lineage.py @@ -0,0 +1,44 @@ +import pytest + +import json + +from populus.utils.packaging import ( + extract_dependency_name_from_identifier_lineage, +) + + +@pytest.mark.parametrize( + 'lineage,expected', + ( + (("owned>=1.0.0", "owned==1.0.0", "ipfs://Qm.."), "owned"), + (("owned", "owned==1.0.0", "ipfs://Qm.."), "owned"), + (("owned==1.0.0", "ipfs://Qm.."), "owned"), + (("ipfs://Qm..",), "fallback"), + (("powned:owned>=1.0.0", "owned>=1.0.0", "owned==1.0.0", "ipfs://Qm.."), "powned"), + (("powned:owneg", "owned", "owned==1.0.0", "ipfs://Qm.."), "powned"), + (("powned:owned==1.0.0", "owned==1.0.0", "ipfs://Qm.."), "powned"), + (("powned@ipfs://Qm..", "ipfs://Qm..",), "powned"), + ((".", "owned>=1.0.0", "owned==1.0.0", "ipfs://Qm.."), "owned"), + ((".", "owned", "owned==1.0.0", "ipfs://Qm.."), "owned"), + ((".", "owned==1.0.0", "ipfs://Qm.."), "owned"), + ((".", "ipfs://Qm..",), "fallback"), + ((".", "powned:owned>=1.0.0", "owned>=1.0.0", "owned==1.0.0", "ipfs://Qm.."), "powned"), + ((".", "powned:owneg", "owned", "owned==1.0.0", "ipfs://Qm.."), "powned"), + ((".", "powned:owned==1.0.0", "owned==1.0.0", "ipfs://Qm.."), "powned"), + ((".", "powned@ipfs://Qm..", "ipfs://Qm..",), "powned"), + (("./test-package-1.0.0.json",), "fallback"), + (("test-package@./test-package-1.0.0.json",), "test-package"), + ) +) +def test_extract_dependency_name_from_identifier_lineage(project_dir, + write_project_file, + lineage, + expected): + write_project_file("test-package-1.0.0.json", json.dumps({ + "lockfile_version": "1", + "package_name": "test-package", + "version": "1.0.0", + })) + release_lockfile = {'package_name': 'fallback'} + actual = extract_dependency_name_from_identifier_lineage(lineage, release_lockfile) + assert actual == expected diff --git a/tests/packaging-utils/test_install_packages_to_project.py 
b/tests/packaging-utils/test_install_packages_to_project.py new file mode 100644 index 00000000..b4505476 --- /dev/null +++ b/tests/packaging-utils/test_install_packages_to_project.py @@ -0,0 +1,104 @@ +import pytest +import json + +from populus.packages.installation import install_packages_to_project + +from populus.utils.dependencies import ( + get_dependency_base_dir, +) + + +@pytest.fixture(autouse=True) +def populate_packages_in_mock_backend(load_example_project): + load_example_project('owned') + load_example_project('transferable') + load_example_project('standard-token') + load_example_project('piper-coin') + load_example_project('safe-math-lib') + load_example_project('escrow') + load_example_project('wallet') + + +def test_installing_single_package(temporary_dir, + mock_package_backends, + verify_installed_package): + installed_packages = install_packages_to_project( + temporary_dir, + ['owned'], + mock_package_backends, + ) + + assert len(installed_packages) == 1 + + dependency_base_dir = get_dependency_base_dir(temporary_dir, 'owned') + + verify_installed_package(temporary_dir, dependency_base_dir, installed_packages[0]) + + +def test_installing_multiple_packages(temporary_dir, + mock_package_backends, + verify_installed_package): + installed_packages = install_packages_to_project( + temporary_dir, + ['owned', 'transferable', 'standard-token'], + mock_package_backends, + ) + + assert len(installed_packages) == 3 + + verify_installed_package( + temporary_dir, + get_dependency_base_dir(temporary_dir, 'owned'), + installed_packages[0], + ) + verify_installed_package( + temporary_dir, + get_dependency_base_dir(temporary_dir, 'standard-token'), + installed_packages[1], + ) + verify_installed_package( + temporary_dir, + get_dependency_base_dir(temporary_dir, 'transferable'), + installed_packages[2], + ) + + +def test_installing_project_dependencies(project, + mock_package_backends, + verify_installed_package): + package_manifest = { + 'package_name': 'test-package', + 'version': '1.0.0', + 'dependencies': { + 'owned': '>=1.0.0', + 'transferable': '>=1.0.0', + 'standard-token': '>=1.0.0', + } + } + + with open(project.package_manifest_path, 'w') as package_manifest_file: + json.dump(package_manifest, package_manifest_file) + + installed_packages = install_packages_to_project( + project.installed_packages_dir, + ['.'], + mock_package_backends, + ) + + assert len(installed_packages) == 3 + + verify_installed_package( + project.installed_packages_dir, + get_dependency_base_dir(project.installed_packages_dir, 'owned'), + installed_packages[0], + ) + verify_installed_package( + project.installed_packages_dir, + get_dependency_base_dir(project.installed_packages_dir, 'standard-token'), + installed_packages[1], + ) + verify_installed_package( + project.installed_packages_dir, + get_dependency_base_dir(project.installed_packages_dir, 'transferable'), + installed_packages[2], + ) diff --git a/tests/packaging-utils/test_package_identifier_helpers.py b/tests/packaging-utils/test_package_identifier_helpers.py new file mode 100644 index 00000000..cbba10a0 --- /dev/null +++ b/tests/packaging-utils/test_package_identifier_helpers.py @@ -0,0 +1,227 @@ +import itertools + +import pytest + +from populus.utils.packaging import( + is_package_name, + is_direct_package_identifier, + is_aliased_package_identifier, + is_aliased_ipfs_uri, + parse_package_identifier, +) + + +NAME_ONLY_IDENTIFIERS = ( + 'populus', + 'with-12345', + 'with-dash', +) + + +ALIASED_NAME_ONLY_IDENTIFIERS = tuple( + ':'.join(('alias-' 
+ name, name)) for name in NAME_ONLY_IDENTIFIERS +) + + +EXACT_VERSION_IDENTIFIERS = ( + 'populus==1.0.0', + # Names with versions and prerelease + 'populus==1.0.0-beta1', + 'populus==1.0.0-b1', + 'populus==1.0.0-beta1.other2', + 'populus==1.0.0-beta1.other2.another', + # Names with versions and build + 'populus==1.0.0+d4feab1', + 'populus==1.0.0+d4feab1.deadbeef', + # Names with versions and prerelease and build + 'populus==1.0.0-beta1+d4feab1', + 'populus==1.0.0-beta1.another2+d4feab1.deadbeef', +) + + +ALIASED_EXACT_VERSION_IDENTIFIERS = tuple( + ':'.join((parse_package_identifier(name)[0], name)) for name in EXACT_VERSION_IDENTIFIERS +) + + +COMPARISON_IDENTIFIERS = ( + 'populus>=1.0.0', + 'populus<=1.0.0', + 'populus>1.0.0', + 'populus<1.0.0', +) + + +ALIASED_COMPARISON_IDENTIFIERS = tuple( + ':'.join((parse_package_identifier(name)[0], name)) for name in COMPARISON_IDENTIFIERS +) + + +BAD_NAMES = ( + '-dash-start', + '0-number-start', + 'with_underscore', + 'withCapital', +) + + +ALIASED_BAD_NAMES = tuple( + ':'.join((name, name)) for name in BAD_NAMES +) + + +BAD_NAMES_WITH_VERSIONS = ( + '-dash-start==1.0.0', + '0-number-start==1.0.0', + 'with_underscore==1.0.0', + 'withCapital==1.0.0', +) + + +ALIASED_BAD_NAMES_WITH_VERSIONS = tuple( + ':'.join((name.partition('==')[0], name)) for name in BAD_NAMES_WITH_VERSIONS +) + + +@pytest.mark.parametrize( + 'value,expected', + tuple( + zip(NAME_ONLY_IDENTIFIERS, itertools.repeat(True)) + ) + tuple( + zip(ALIASED_NAME_ONLY_IDENTIFIERS, itertools.repeat(False)) + ) + tuple( + zip(EXACT_VERSION_IDENTIFIERS, itertools.repeat(False)) + ) + tuple( + zip(ALIASED_EXACT_VERSION_IDENTIFIERS, itertools.repeat(False)) + ) + tuple( + zip(COMPARISON_IDENTIFIERS, itertools.repeat(False)) + ) + tuple( + zip(ALIASED_COMPARISON_IDENTIFIERS, itertools.repeat(False)) + ) + tuple( + zip(BAD_NAMES, itertools.repeat(False)) + ) + tuple( + zip(ALIASED_BAD_NAMES, itertools.repeat(False)) + ) +) +def test_is_package_name(value, expected): + actual = is_package_name(value) + assert actual is expected + + +@pytest.mark.parametrize( + 'value,expected', + tuple( + zip(NAME_ONLY_IDENTIFIERS, itertools.repeat(True)) + ) + tuple( + zip(ALIASED_NAME_ONLY_IDENTIFIERS, itertools.repeat(False)) + ) + tuple( + zip(EXACT_VERSION_IDENTIFIERS, itertools.repeat(True)) + ) + tuple( + zip(ALIASED_EXACT_VERSION_IDENTIFIERS, itertools.repeat(False)) + ) + tuple( + zip(COMPARISON_IDENTIFIERS, itertools.repeat(True)) + ) + tuple( + zip(ALIASED_COMPARISON_IDENTIFIERS, itertools.repeat(False)) + ) + tuple( + zip(BAD_NAMES, itertools.repeat(False)) + ) + tuple( + zip(ALIASED_BAD_NAMES, itertools.repeat(False)) + ) +) +def test_is_direct_package_identifier(value, expected): + actual = is_direct_package_identifier(value) + assert actual is expected + + +@pytest.mark.parametrize( + 'value,expected', + tuple( + zip(NAME_ONLY_IDENTIFIERS, itertools.repeat(False)) + ) + tuple( + zip(ALIASED_NAME_ONLY_IDENTIFIERS, itertools.repeat(True)) + ) + tuple( + zip(EXACT_VERSION_IDENTIFIERS, itertools.repeat(False)) + ) + tuple( + zip(ALIASED_EXACT_VERSION_IDENTIFIERS, itertools.repeat(True)) + ) + tuple( + zip(COMPARISON_IDENTIFIERS, itertools.repeat(False)) + ) + tuple( + zip(ALIASED_COMPARISON_IDENTIFIERS, itertools.repeat(True)) + ) + tuple( + zip(BAD_NAMES, itertools.repeat(False)) + ) + tuple( + zip(ALIASED_BAD_NAMES, itertools.repeat(False)) + ) +) +def test_is_aliased_package_identifier(value, expected): + actual = is_aliased_package_identifier(value) + assert actual is expected + + 
+@pytest.mark.parametrize( + 'value,expected', + ( + ('populus', ('populus', None, None)), + ) +) +def test_parse_package_identifier(value, expected): + name, comparison, version = parse_package_identifier(value) + assert (name, comparison, version) == expected + + +@pytest.mark.parametrize( + 'value,expected', + ( + # Not Aliased + ('ipfs:QmTKB75Y73zhNbD3Y73xeXGjYrZHmaXXNxoZqGCagu7r8u', False), + ('ipfs:/QmTKB75Y73zhNbD3Y73xeXGjYrZHmaXXNxoZqGCagu7r8u', False), + ('ipfs://QmTKB75Y73zhNbD3Y73xeXGjYrZHmaXXNxoZqGCagu7r8u', False), + ('ipfs:QmTKB75Y73zhNbD3Y73xeXGjYrZHmaXXNxoZqGCagu7r8u/', False), + ('ipfs:/QmTKB75Y73zhNbD3Y73xeXGjYrZHmaXXNxoZqGCagu7r8u/', False), + ('ipfs://QmTKB75Y73zhNbD3Y73xeXGjYrZHmaXXNxoZqGCagu7r8u/', False), + ('ipfs:QmTKB75Y73zhNbD3Y73xeXGjYrZHmaXXNxoZqGCagu7r8u/readme', False), + ('ipfs:/QmTKB75Y73zhNbD3Y73xeXGjYrZHmaXXNxoZqGCagu7r8u/readme', False), + ('ipfs://QmTKB75Y73zhNbD3Y73xeXGjYrZHmaXXNxoZqGCagu7r8u/readme', False), + ('ipfs:QmTKB75Y73zhNbD3Y73xeXGjYrZHmaXXNxoZqGCagu7r8u/readme/', False), + ('ipfs:/QmTKB75Y73zhNbD3Y73xeXGjYrZHmaXXNxoZqGCagu7r8u/readme/', False), + ('ipfs://QmTKB75Y73zhNbD3Y73xeXGjYrZHmaXXNxoZqGCagu7r8u/readme/', False), + # malformed + ('ipfs//QmTKB75Y73zhNbD3Y73xeXGjYrZHmaXXNxoZqGCagu7r8u/readme/', False), + ('ipfs/QmTKB75Y73zhNbD3Y73xeXGjYrZHmaXXNxoZqGCagu7r8u/readme/', False), + ('ipfsQmTKB75Y73zhNbD3Y73xeXGjYrZHmaXXNxoZqGCagu7r8u/readme/', False), + # HTTP + ('http://QmTKB75Y73zhNbD3Y73xeXGjYrZHmaXXNxoZqGCagu7r8u/readme', False), + ('https://QmTKB75Y73zhNbD3Y73xeXGjYrZHmaXXNxoZqGCagu7r8u/readme', False), + # No hash + ('ipfs://', False), + # Aliased Identifiers + ('populus@ipfs:QmTKB75Y73zhNbD3Y73xeXGjYrZHmaXXNxoZqGCagu7r8u', True), + ('populus@ipfs:/QmTKB75Y73zhNbD3Y73xeXGjYrZHmaXXNxoZqGCagu7r8u', True), + ('populus@ipfs://QmTKB75Y73zhNbD3Y73xeXGjYrZHmaXXNxoZqGCagu7r8u', True), + ('populus@ipfs:QmTKB75Y73zhNbD3Y73xeXGjYrZHmaXXNxoZqGCagu7r8u/', True), + ('populus@ipfs:/QmTKB75Y73zhNbD3Y73xeXGjYrZHmaXXNxoZqGCagu7r8u/', True), + ('populus@ipfs://QmTKB75Y73zhNbD3Y73xeXGjYrZHmaXXNxoZqGCagu7r8u/', True), + ('populus@ipfs:QmTKB75Y73zhNbD3Y73xeXGjYrZHmaXXNxoZqGCagu7r8u/readme', True), + ('populus@ipfs:/QmTKB75Y73zhNbD3Y73xeXGjYrZHmaXXNxoZqGCagu7r8u/readme', True), + ('populus@ipfs://QmTKB75Y73zhNbD3Y73xeXGjYrZHmaXXNxoZqGCagu7r8u/readme', True), + ('populus@ipfs:QmTKB75Y73zhNbD3Y73xeXGjYrZHmaXXNxoZqGCagu7r8u/readme/', True), + ('populus@ipfs:/QmTKB75Y73zhNbD3Y73xeXGjYrZHmaXXNxoZqGCagu7r8u/readme/', True), + ('populus@ipfs://QmTKB75Y73zhNbD3Y73xeXGjYrZHmaXXNxoZqGCagu7r8u/readme/', True), + # malformed + ('populus@ipfs//QmTKB75Y73zhNbD3Y73xeXGjYrZHmaXXNxoZqGCagu7r8u/readme/', False), + ('populus@ipfs/QmTKB75Y73zhNbD3Y73xeXGjYrZHmaXXNxoZqGCagu7r8u/readme/', False), + ('populus@ipfsQmTKB75Y73zhNbD3Y73xeXGjYrZHmaXXNxoZqGCagu7r8u/readme/', False), + # HTTP + ('populus@http://QmTKB75Y73zhNbD3Y73xeXGjYrZHmaXXNxoZqGCagu7r8u/readme', False), + ('populus@https://QmTKB75Y73zhNbD3Y73xeXGjYrZHmaXXNxoZqGCagu7r8u/readme', False), + # No hash + ('populus@ipfs://', False), + # Aliased with bad package names + ('-starts-with-dash@ipfs:QmTKB75Y73zhNbD3Y73xeXGjYrZHmaXXNxoZqGCagu7r8u', False), + ('has_underscore@ipfs:QmTKB75Y73zhNbD3Y73xeXGjYrZHmaXXNxoZqGCagu7r8u', False), + ('hasCapitalCase@ipfs:QmTKB75Y73zhNbD3Y73xeXGjYrZHmaXXNxoZqGCagu7r8u', False), + ) +) +def test_is_aliased_ipfs_uri(value, expected): + actual = is_aliased_ipfs_uri(value) + assert actual is expected diff --git a/tests/packaging-utils/test_recursive_resolution_of_package_data.py 
diff --git a/tests/packaging-utils/test_recursive_resolution_of_package_data.py b/tests/packaging-utils/test_recursive_resolution_of_package_data.py
new file mode 100644
index 00000000..20bf66ec
--- /dev/null
+++ b/tests/packaging-utils/test_recursive_resolution_of_package_data.py
@@ -0,0 +1,197 @@
+from populus.utils.packaging import (
+    compute_identifier_tree,
+    flatten_identifier_tree,
+    recursively_resolve_package_data,
+)
+
+
+def test_package_data_resolution_on_owned_example_package(load_example_project,
+                                                          mock_package_backends):
+    load_example_project('owned')
+    lineages = flatten_identifier_tree(compute_identifier_tree(['owned'], mock_package_backends))
+    assert len(lineages) == 1
+
+    package_data = recursively_resolve_package_data(lineages[0], mock_package_backends)
+    package_meta = package_data['meta']
+
+    assert package_meta['version'] == '1.0.0'
+    assert package_meta['dependency_name'] == 'owned'
+    assert package_meta['package_name'] == 'owned'
+    assert package_meta['install_identifier'] == 'owned==1.0.0'
+    assert package_meta['build_identifier'] == 'ipfs://QmUwVUMVtkVctrLDeL12SoeCPUacELBU8nAxRtHUzvtjND'
+
+    assert not package_data['dependencies']
+    assert './contracts/owned.sol' in package_data['source_tree']
+
+
+def test_package_data_resolution_on_transferable_example_package(load_example_project,
+                                                                 mock_package_backends):
+    load_example_project('owned')
+    load_example_project('transferable')
+    lineages = flatten_identifier_tree(compute_identifier_tree(['transferable'], mock_package_backends))
+    assert len(lineages) == 1
+
+    package_data = recursively_resolve_package_data(lineages[0], mock_package_backends)
+    package_meta = package_data['meta']
+
+    assert package_meta['version'] == '1.0.0'
+    assert package_meta['dependency_name'] == 'transferable'
+    assert package_meta['package_name'] == 'transferable'
+    assert package_meta['install_identifier'] == 'transferable==1.0.0'
+    assert package_meta['build_identifier'] == 'ipfs://QmaTMa6MwtH6CisPypiFkFdd1ByrFAvdExcQkUQwqbMeZx'
+
+    assert './contracts/transferable.sol' in package_data['source_tree']
+    assert len(package_data['dependencies'])
+
+    owned_package_data = package_data['dependencies'][0]
+    owned_package_meta = owned_package_data['meta']
+
+    assert owned_package_meta['version'] == '1.0.0'
+    assert owned_package_meta['dependency_name'] == 'owned'
+    assert owned_package_meta['package_name'] == 'owned'
+    assert owned_package_meta['install_identifier'] == 'ipfs://QmUwVUMVtkVctrLDeL12SoeCPUacELBU8nAxRtHUzvtjND'
+    assert owned_package_meta['build_identifier'] == 'ipfs://QmUwVUMVtkVctrLDeL12SoeCPUacELBU8nAxRtHUzvtjND'
+
+    assert not owned_package_data['dependencies']
+    assert './contracts/owned.sol' in owned_package_data['source_tree']
+
+
+def test_package_data_resolution_on_standard_token_example_package(load_example_project,
+                                                                   mock_package_backends):
+    load_example_project('standard-token')
+    lineages = flatten_identifier_tree(compute_identifier_tree(['standard-token'], mock_package_backends))
+    assert len(lineages) == 1
+
+    package_data = recursively_resolve_package_data(lineages[0], mock_package_backends)
+    package_meta = package_data['meta']
+
+    assert package_meta['version'] == '1.0.0'
+    assert package_meta['dependency_name'] == 'standard-token'
+    assert package_meta['package_name'] == 'standard-token'
+    assert package_meta['install_identifier'] == 'standard-token==1.0.0'
+    assert package_meta['build_identifier'] == 'ipfs://QmegJYswSDXUJbKWBuTj7AGBY15XceKxnF1o1Vo2VvVPLQ'
+
+    assert not package_data['dependencies']
+    assert './contracts/AbstractToken.sol' in package_data['source_tree']
+    assert './contracts/StandardToken.sol' in package_data['source_tree']
+
+
+def test_package_data_resolution_on_piper_coin_example_package(load_example_project,
+                                                               mock_package_backends):
+    load_example_project('standard-token')
+    load_example_project('piper-coin')
+    lineages = flatten_identifier_tree(compute_identifier_tree(['piper-coin'], mock_package_backends))
+    assert len(lineages) == 1
+
+    package_data = recursively_resolve_package_data(lineages[0], mock_package_backends)
+    package_meta = package_data['meta']
+
+    assert package_meta['version'] == '1.0.0'
+    assert package_meta['dependency_name'] == 'piper-coin'
+    assert package_meta['package_name'] == 'piper-coin'
+    assert package_meta['install_identifier'] == 'piper-coin==1.0.0'
+    assert package_meta['build_identifier'] == 'ipfs://QmYxRT4k5ByUH4N4A455M5s1RxsgUfqyYrntcuuxdHezXv'
+
+    assert not package_data['source_tree']
+
+    assert len(package_data['dependencies']) == 1
+
+    standard_token_package_data = package_data['dependencies'][0]
+    standard_token_package_meta = standard_token_package_data['meta']
+
+    assert standard_token_package_meta['version'] == '1.0.0'
+    assert standard_token_package_meta['dependency_name'] == 'standard-token'
+    assert standard_token_package_meta['package_name'] == 'standard-token'
+    assert standard_token_package_meta['install_identifier'] == 'ipfs://QmegJYswSDXUJbKWBuTj7AGBY15XceKxnF1o1Vo2VvVPLQ'
+    assert standard_token_package_meta['build_identifier'] == 'ipfs://QmegJYswSDXUJbKWBuTj7AGBY15XceKxnF1o1Vo2VvVPLQ'
+
+    assert not standard_token_package_data['dependencies']
+    assert './contracts/AbstractToken.sol' in standard_token_package_data['source_tree']
+    assert './contracts/StandardToken.sol' in standard_token_package_data['source_tree']
+
+
+def test_package_data_resolution_on_safe_math_lib_example_package(load_example_project,
+                                                                  mock_package_backends):
+    load_example_project('safe-math-lib')
+    lineages = flatten_identifier_tree(compute_identifier_tree(['safe-math-lib'], mock_package_backends))
+    assert len(lineages) == 1
+
+    package_data = recursively_resolve_package_data(lineages[0], mock_package_backends)
+    package_meta = package_data['meta']
+
+    assert package_meta['version'] == '1.0.0'
+    assert package_meta['dependency_name'] == 'safe-math-lib'
+    assert package_meta['package_name'] == 'safe-math-lib'
+    assert package_meta['install_identifier'] == 'safe-math-lib==1.0.0'
+    assert package_meta['build_identifier'] == 'ipfs://QmfUwis9K2SLwnUh62PDb929JzU5J2aFKd4kS1YErYajdq'
+
+    assert not package_data['dependencies']
+    assert './contracts/SafeMathLib.sol' in package_data['source_tree']
+
+
+def test_package_data_resolution_on_escrow_example_package(load_example_project,
+                                                           mock_package_backends):
+    load_example_project('escrow')
+    lineages = flatten_identifier_tree(compute_identifier_tree(['escrow'], mock_package_backends))
+    assert len(lineages) == 1
+
+    package_data = recursively_resolve_package_data(lineages[0], mock_package_backends)
+    package_meta = package_data['meta']
+
+    assert package_meta['version'] == '1.0.0'
+    assert package_meta['dependency_name'] == 'escrow'
+    assert package_meta['package_name'] == 'escrow'
+    assert package_meta['install_identifier'] == 'escrow==1.0.0'
+    assert package_meta['build_identifier'] == 'ipfs://Qmb4YtjwsAQyYXmCwSF71Lez9d7qchPc6WkT2iGc9m1gX6'
+
+    assert not package_data['dependencies']
+    assert './contracts/Escrow.sol' in package_data['source_tree']
+    assert './contracts/SafeSendLib.sol' in package_data['source_tree']
+
+
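+# For reference, the package_data dicts asserted against in these tests have (at least)
+# the following shape; only the keys shown here are exercised, and the exact value type
+# of each 'source_tree' entry is an assumption (presumably the raw source file contents):
+#
+#     {
+#         'meta': {
+#             'package_name': ...,
+#             'dependency_name': ...,
+#             'version': ...,
+#             'install_identifier': ...,
+#             'build_identifier': ...,
+#         },
+#         'source_tree': {'./contracts/<Contract>.sol': <source>, ...},
+#         'dependencies': [<nested package_data>, ...],
+#     }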
+def test_package_data_resolution_on_wallet_example_package(load_example_project,
+                                                           mock_package_backends):
+    load_example_project('owned')
+    load_example_project('safe-math-lib')
+    load_example_project('wallet')
+    lineages = flatten_identifier_tree(compute_identifier_tree(['wallet'], mock_package_backends))
+    assert len(lineages) == 1
+
+    package_data = recursively_resolve_package_data(lineages[0], mock_package_backends)
+    package_meta = package_data['meta']
+
+    assert package_meta['version'] == '1.0.0'
+    assert package_meta['dependency_name'] == 'wallet'
+    assert package_meta['package_name'] == 'wallet'
+    assert package_meta['install_identifier'] == 'wallet==1.0.0'
+    assert package_meta['build_identifier'] == 'ipfs://QmSg2QvGhQrYgQqbTGVYjGmF9hkEZrxQNmSXsr8fFyYtD4'
+
+    assert './contracts/Wallet.sol' in package_data['source_tree']
+
+    assert len(package_data['dependencies']) == 2
+    owned_package_data, safe_math_lib_package_data = tuple(sorted(
+        package_data['dependencies'],
+        key=lambda d: d['meta']['package_name']
+    ))
+
+    owned_package_meta = owned_package_data['meta']
+
+    assert owned_package_meta['version'] == '1.0.0'
+    assert owned_package_meta['dependency_name'] == 'owned'
+    assert owned_package_meta['package_name'] == 'owned'
+    assert owned_package_meta['install_identifier'] == 'ipfs://QmUwVUMVtkVctrLDeL12SoeCPUacELBU8nAxRtHUzvtjND'
+    assert owned_package_meta['build_identifier'] == 'ipfs://QmUwVUMVtkVctrLDeL12SoeCPUacELBU8nAxRtHUzvtjND'
+
+    assert not owned_package_data['dependencies']
+    assert './contracts/owned.sol' in owned_package_data['source_tree']
+
+    safe_math_lib_package_meta = safe_math_lib_package_data['meta']
+
+    assert safe_math_lib_package_meta['version'] == '1.0.0'
+    assert safe_math_lib_package_meta['dependency_name'] == 'safe-math-lib'
+    assert safe_math_lib_package_meta['package_name'] == 'safe-math-lib'
+    assert safe_math_lib_package_meta['install_identifier'] == 'ipfs://QmfUwis9K2SLwnUh62PDb929JzU5J2aFKd4kS1YErYajdq'
+    assert safe_math_lib_package_meta['build_identifier'] == 'ipfs://QmfUwis9K2SLwnUh62PDb929JzU5J2aFKd4kS1YErYajdq'
+
+    assert not safe_math_lib_package_data['dependencies']
+    assert './contracts/SafeMathLib.sol' in safe_math_lib_package_data['source_tree']
diff --git a/tests/packaging-utils/test_release_lockfile_validation.py b/tests/packaging-utils/test_release_lockfile_validation.py
new file mode 100644
index 00000000..6f90d3fc
--- /dev/null
+++ b/tests/packaging-utils/test_release_lockfile_validation.py
@@ -0,0 +1,105 @@
+import pytest
+
+import itertools
+import json
+import os
+
+import jsonschema
+
+from eth_utils import (
+    to_dict,
+)
+from populus.utils.packaging import (
+    validate_release_lockfile,
+)
+
+EXAMPLE_PACKAGES_BASE_PATH = './tests/example-packages'
+
+EXAMPLE_RELEASE_LOCKFILES = (
+    'owned/1.0.0.json',
+    'transferable/1.0.0.json',
+    'standard-token/1.0.0.json',
+    'piper-coin/1.0.0.json',
+    'safe-math-lib/1.0.0.json',
+    'escrow/1.0.0.json',
+    'wallet/1.0.0.json',
+)
+
+
+@pytest.mark.parametrize(
+    'lockfile_path',
+    EXAMPLE_RELEASE_LOCKFILES,
+)
+def test_example_release_lockfiles_are_valid(populus_source_root, lockfile_path):
+    full_lockfile_path = os.path.join(
+        populus_source_root,
+        EXAMPLE_PACKAGES_BASE_PATH,
+        lockfile_path,
+    )
+    with open(full_lockfile_path) as lockfile_file:
+        release_lockfile = json.load(lockfile_file)
+
+    validate_release_lockfile(release_lockfile)
+
+
+MINIMAL_SCHEMA = {
+    'lockfile_version': '1',
+    'package_name': 'test-package',
+    'version': '1.0.0',
+}
+
+
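+# A minimal release lockfile: the three top-level fields above appear to be the only
+# required ones, since this document validates while the variants further down (each
+# dropping or mangling one of those fields) do not.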
+@pytest.mark.parametrize(
+    'release_lockfile',
+    (
+        MINIMAL_SCHEMA,
+    ),
+)
+def test_minimal_release_lockfile_is_valid(release_lockfile):
+    validate_release_lockfile(release_lockfile)
+
+
+DEFAULT_SCHEMA = {
+    'lockfile_version': '1',
+    'package_name': 'test-package',
+    'version': '1.0.0',
+}
+
+
+@to_dict
+def build_schema(**kwargs):
+    for key in set(itertools.chain(DEFAULT_SCHEMA, kwargs)):
+        if key in kwargs:
+            if kwargs[key] is None:
+                continue
+            yield key, kwargs[key]
+        else:
+            yield key, DEFAULT_SCHEMA[key]
+
+
+EMPTY_SCHEMA = {}
+LOCKFILE_VERSION_AS_INTEGER = build_schema(lockfile_version=1)
+LOCKFILE_VERSION_MISSING = build_schema(lockfile_version=None)
+VERSION_MISSING = build_schema(version=None)
+PACKAGE_NAME_MISSING = build_schema(package_name=None)
+AUTHORS_AS_STRING = build_schema(meta={'authors': "Piper Merriam"})
+
+
+@pytest.mark.parametrize(
+    'release_lockfile',
+    (
+        EMPTY_SCHEMA,
+        LOCKFILE_VERSION_AS_INTEGER,
+        LOCKFILE_VERSION_MISSING,
+        VERSION_MISSING,
+        PACKAGE_NAME_MISSING,
+        AUTHORS_AS_STRING,
+    )
+)
+def test_raises_on_invalid_schema(release_lockfile):
+    """
+    TODO: This test suite could be greatly expanded as there are a lot more
+    ways that lockfiles can be invalid.
+    """
+    with pytest.raises(jsonschema.ValidationError):
+        validate_release_lockfile(release_lockfile)
diff --git a/tests/packaging-utils/test_write_installed_packages.py b/tests/packaging-utils/test_write_installed_packages.py
new file mode 100644
index 00000000..83f83d6f
--- /dev/null
+++ b/tests/packaging-utils/test_write_installed_packages.py
@@ -0,0 +1,163 @@
+import os
+import json
+
+from populus import Project
+
+from populus.packages.installation import (
+    write_installed_packages,
+    write_package_files,
+)
+
+from populus.utils.dependencies import (
+    get_dependency_base_dir,
+)
+from populus.utils.filesystem import (
+    ensure_file_exists,
+    ensure_path_exists,
+    is_same_path,
+)
+from populus.utils.packaging import (
+    compute_identifier_tree,
+    flatten_identifier_tree,
+    recursively_resolve_package_data,
+)
+
+
+def test_initial_write_of_package_data(temporary_dir,
+                                       load_example_project,
+                                       mock_package_backends,
+                                       verify_installed_package):
+    load_example_project('owned')
+    load_example_project('safe-math-lib')
+    load_example_project('wallet')
+    lineages = flatten_identifier_tree(compute_identifier_tree(['wallet'], mock_package_backends))
+    assert len(lineages) == 1
+
+    package_data = recursively_resolve_package_data(
+        lineages[0],
+        mock_package_backends,
+    )
+
+    package_base_dir = write_package_files(temporary_dir, package_data)
+
+    verify_installed_package(temporary_dir, package_base_dir, package_data)
+
+
+def test_write_package_data_with_existing_install(temporary_dir,
+                                                  load_example_project,
+                                                  mock_package_backends,
+                                                  verify_installed_package):
+    load_example_project('owned')
+    load_example_project('safe-math-lib')
+    load_example_project('wallet')
+    lineages = flatten_identifier_tree(compute_identifier_tree(['wallet'], mock_package_backends))
+    assert len(lineages) == 1
+
+    package_data = recursively_resolve_package_data(
+        lineages[0],
+        mock_package_backends,
+    )
+
+    package_base_dir = get_dependency_base_dir(temporary_dir, 'wallet')
+
+    pre_existing_file_path = os.path.join(package_base_dir, 'test-file.txt')
+    ensure_file_exists(pre_existing_file_path)
+
+    pre_existing_dir_path = os.path.join(package_base_dir, 'test-dir')
+    ensure_path_exists(pre_existing_dir_path)
+    ensure_file_exists(os.path.join(pre_existing_dir_path, 'is-present'))
+
+    write_package_files(temporary_dir, package_data)
+
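+    # write_package_files is expected to replace the dependency directory wholesale, so
+    # the pre-existing file and directory created above should no longer be present.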
+    assert not os.path.exists(pre_existing_file_path)
+    assert not os.path.exists(pre_existing_dir_path)
+    assert not os.path.exists(os.path.join(pre_existing_dir_path, 'is-present'))
+
+
+def test_write_project_packages_with_no_installed_packages(temporary_dir,
+                                                           load_example_project,
+                                                           mock_package_backends,
+                                                           verify_installed_package):
+    load_example_project('owned')
+    load_example_project('safe-math-lib')
+    load_example_project('wallet')
+
+    lineages = flatten_identifier_tree(
+        compute_identifier_tree(['wallet'], mock_package_backends),
+    )
+    assert len(lineages) == 1
+
+    package_data = recursively_resolve_package_data(
+        lineages[0],
+        mock_package_backends,
+    )
+
+    pre_existing_file_path = os.path.join(temporary_dir, 'test-file.txt')
+    ensure_file_exists(pre_existing_file_path)
+
+    pre_existing_dir_path = os.path.join(temporary_dir, 'test-dir')
+    ensure_path_exists(pre_existing_dir_path)
+    ensure_file_exists(os.path.join(pre_existing_dir_path, 'is-present'))
+
+    write_installed_packages(
+        temporary_dir,
+        [package_data],
+    )
+
+    assert os.path.exists(pre_existing_file_path)
+    assert os.path.exists(pre_existing_dir_path)
+    assert os.path.exists(os.path.join(pre_existing_dir_path, 'is-present'))
+
+    wallet_package_base_dir = get_dependency_base_dir(temporary_dir, 'wallet')
+    verify_installed_package(
+        temporary_dir,
+        wallet_package_base_dir,
+        package_data,
+    )
+
+
+def test_write_project_packages_with_existing_install(temporary_dir,
+                                                      load_example_project,
+                                                      mock_package_backends,
+                                                      verify_installed_package):
+    load_example_project('owned')
+    load_example_project('safe-math-lib')
+    load_example_project('wallet')
+
+    project = Project()
+
+    lineages = flatten_identifier_tree(compute_identifier_tree(['wallet'], mock_package_backends))
+    assert len(lineages) == 1
+
+    package_data = recursively_resolve_package_data(
+        lineages[0],
+        mock_package_backends,
+    )
+
+    pre_existing_file_path = os.path.join(temporary_dir, 'test-file.txt')
+    ensure_file_exists(pre_existing_file_path)
+
+    pre_existing_dir_path = os.path.join(temporary_dir, 'test-dir')
+    ensure_path_exists(pre_existing_dir_path)
+    ensure_file_exists(os.path.join(pre_existing_dir_path, 'is-present'))
+
+    wallet_package_base_dir = get_dependency_base_dir(temporary_dir, 'wallet')
+    ensure_file_exists(os.path.join(wallet_package_base_dir, 'another-test-file.txt'))
+
+    write_installed_packages(
+        temporary_dir,
+        [package_data],
+    )
+
+    assert os.path.exists(pre_existing_file_path)
+    assert os.path.exists(pre_existing_dir_path)
+    assert os.path.exists(os.path.join(pre_existing_dir_path, 'is-present'))
+    assert not os.path.exists(os.path.join(wallet_package_base_dir, 'another-test-file.txt'))
+
+    verify_installed_package(
+        temporary_dir,
+        wallet_package_base_dir,
+        package_data,
+    )
diff --git a/tests/project/test_get_chain.py b/tests/project/test_get_chain.py
index c27d350f..4bbf60c0 100644
--- a/tests/project/test_get_chain.py
+++ b/tests/project/test_get_chain.py
@@ -13,10 +13,6 @@
 )
 
 
-TESTNET_BLOCK_1_HASH = '0xad47413137a753b2061ad9b484bf7b0fc061f654b951b562218e9f66505be6ce'
-MAINNET_BLOCK_1_HASH = '0x88e96d4537bea4d9c05d12549907b32561d3bf31f45aae734cdc119f13406cb6'
-
-
 @pytest.mark.slow
 def test_project_tester_chain(project_dir):
     project = Project()

From 4e9cc1efcea2333f48ceec9081ef5392d87608e8 Mon Sep 17 00:00:00 2001
From: Piper Merriam
Date: Tue, 8 Aug 2017 13:19:11 -0600
Subject: [PATCH 2/2] add comment

---
 populus/packages/backends/lockfile.py | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/populus/packages/backends/lockfile.py b/populus/packages/backends/lockfile.py
index 32732c2c..f66e6c91 100644
--- a/populus/packages/backends/lockfile.py
+++ b/populus/packages/backends/lockfile.py
@@ -17,6 +17,9 @@ def can_translate_package_identifier(self, package_identifier):
         return is_aliased_filesystem_release_lockfile_path(package_identifier)
 
     def translate_package_identifier(self, package_identifier):
+        """
+        TODO: this needs to translate the identifier all the way to a lockfile URI
+        """
         if is_aliased_filesystem_release_lockfile_path(package_identifier):
             _, _, release_lockfile_path = package_identifier.partition('@')
             return (